Compare commits
108 Commits
| SHA1 |
|---|
| 0cdc7a3af5 |
| 1cfe155a3a |
| 2923cbc645 |
| 7c209cc9dc |
| 84fa4ce432 |
| f2e9cd9668 |
| 77049d6cbb |
| b4c23c158b |
| 964b4aa389 |
| dc5aa35db7 |
| 43c05d06fc |
| a3f7d1d7e7 |
| bb4a1ca6c2 |
| 57cce640e1 |
| 1eb5d36668 |
| 6bc4850596 |
| 24005ae7ae |
| 7aa296bb57 |
| 3829ed2f8e |
| 2b7294a504 |
| 0c6a892893 |
| 89d94ad85a |
| ffdb78962f |
| 321dae37ce |
| a31797af0b |
| 32999cf432 |
| 88218f5d92 |
| 15761933ac |
| 0b62842f0e |
| 6bceddeeda |
| 2dcbff8cd5 |
| 8659668177 |
| e07b6a9160 |
| aac5ef1438 |
| d780a73297 |
| 9ef8cee36e |
| 77808a2c05 |
| 177e553d12 |
| 40f8272a28 |
| a7eb1141ae |
| c73ed7f32f |
| f047a6fe0c |
| 7f15a86d6a |
| da1e515253 |
| 591786fde6 |
| 47e6ea249d |
| 4a72295de7 |
| 9ed5f2cac5 |
| 3e67f04fe4 |
| b9416ae062 |
| b4e49e093e |
| 020f6ac609 |
| 7e71cbdd46 |
| 1003f62212 |
| 9b18e1f9f0 |
| 24f790f474 |
| fb8749fc5e |
| 96c3592db1 |
| d65421cf46 |
| c52ba448cd |
| 21adce463b |
| f24240bf90 |
| ff83cadd6e |
| e8c09282d9 |
| 5f4d68cde4 |
| 9077a83ea8 |
| 543dc99ecd |
| f0b3a8b1db |
| 0b9ec05181 |
| 0bf12412d6 |
| 0ea4d58c63 |
| 5755b00576 |
| 1c8e074c9d |
| 0e0e5ce4be |
| 23dfe53885 |
| 8e6351a9e4 |
| 3086e2760f |
| b8db2e0b74 |
| 43b46cb324 |
| d0559c7719 |
| 231c63cf62 |
| 2a9aebe059 |
| 4e535d792f |
| 4b487503d4 |
| 0095c40e69 |
| 82c1abfd3a |
| 40988401bd |
| e8e3f4d138 |
| 7eb77f5b51 |
| e115235299 |
| 151d4b2d30 |
| e553f8b4c5 |
| 47652ef0fb |
| ab0e950800 |
| a7b0ce1c85 |
| dc9c0edece |
| 17ae386d1e |
| 2d369d0cfe |
| c36e645d9b |
| 40039c07e2 |
| a692cec0ef |
| e7ca491a94 |
| 23f3e2fc11 |
| 27b3e17b79 |
| 740781af56 |
| 36c9c229b8 |
| 183fdcbdef |
| a2a697900a |
.github/dependabot.yml (vendored, new file): 12 lines
@@ -0,0 +1,12 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+  - package-ecosystem: "gomod" # See documentation for possible values
+    directory: "/" # Location of package manifests
+    schedule:
+      interval: "weekly"
+    target-branch: "master"
.github/workflows/docker-publish.yml (vendored, new file): 45 lines
@@ -0,0 +1,45 @@
+name: Publish Docker image
+
+on:
+  push:
+    branches:
+      - 'master'
+    tags:
+      - '*'
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: vuls/vuls
+          tags: |
+            type=ref,event=tag
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          tags: |
+            vuls/vuls:latest
+            ${{ steps.meta.outputs.tags }}
+          secrets: |
+            "github_token=${{ secrets.GITHUB_TOKEN }}"
.github/workflows/golangci.yml (vendored): 2 lines changed
@@ -16,7 +16,7 @@ jobs:
         uses: golangci/golangci-lint-action@v2
         with:
           # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
-          version: v1.32
+          version: v1.42
           args: --timeout=10m

           # Optional: working directory, useful for monorepos
.github/workflows/tidy.yml (vendored, deleted): 22 lines
@@ -1,22 +0,0 @@
-name: go-mod-tidy-pr
-
-on:
-  schedule:
-    - cron: "0 0 * * 1" # Weekly build
-
-jobs:
-  go-mod-tidy-pr:
-    name: go-mod-tidy-pr
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Run go-mod-tidy-pr
-        uses: sue445/go-mod-tidy-pr@master
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          git_user_name: kotakanbe
-          git_user_email: kotakanbe@gmail.com
-          go_version: 1.16.x
.gitignore (vendored): 6 lines changed
@@ -1,5 +1,6 @@
.vscode
*.txt
*.swp
*.sqlite3*
*.db
tags
@@ -8,9 +9,12 @@ coverage.out
issues/
vendor/
log/
results/
results
config.toml
!setup/docker/*
.DS_Store
dist/
.idea
vuls.*
vuls
!cmd/vuls
.gitmodules (vendored, new file): 3 lines
@@ -0,0 +1,3 @@
+[submodule "integration"]
+	path = integration
+	url = https://github.com/vulsio/integration
@@ -1,14 +1,44 @@
name: golang-ci

linters-settings:
  errcheck:
  revive:
    # see https://github.com/mgechev/revive#available-rules for details.
    ignore-generated-header: true
    severity: warning
    confidence: 0.8
    rules:
      - name: blank-imports
      - name: context-as-argument
      - name: context-keys-type
      - name: dot-imports
      - name: error-return
      - name: error-strings
      - name: error-naming
      - name: exported
      - name: if-return
      - name: increment-decrement
      - name: var-naming
      - name: var-declaration
      - name: package-comments
      - name: range
      - name: receiver-naming
      - name: time-naming
      - name: unexported-return
      - name: indent-error-flow
      - name: errorf
      - name: empty-block
      - name: superfluous-else
      - name: unused-parameter
      - name: unreachable-code
      - name: redefines-builtin-id
  # errcheck:
  #exclude: /path/to/file.txt

linters:
  disable-all: true
  enable:
    - goimports
    - golint
    - revive
    - govet
    - misspell
    - errcheck

@@ -31,7 +31,8 @@ builds:
   main: ./cmd/scanner/main.go
   flags:
     - -a
-    - -tags=scanner
+  tags:
+    - scanner
   ldflags:
     - -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
   binary: vuls-scanner
@@ -46,6 +47,8 @@ builds:
     - amd64
     - arm
     - arm64
+  tags:
+    - scanner
   main: ./contrib/trivy/cmd/main.go
   binary: trivy-to-vuls

@@ -61,7 +64,8 @@ builds:
     - arm64
   flags:
     - -a
-    - -tags=scanner
+  tags:
+    - scanner
   main: ./contrib/future-vuls/cmd/main.go
   binary: future-vuls

.revive.toml (new file): 30 lines
@@ -0,0 +1,30 @@
+ignoreGeneratedHeader = false
+severity = "warning"
+confidence = 0.8
+errorCode = 0
+warningCode = 0
+
+[rule.blank-imports]
+[rule.context-as-argument]
+[rule.context-keys-type]
+[rule.dot-imports]
+[rule.error-return]
+[rule.error-strings]
+[rule.error-naming]
+[rule.exported]
+[rule.if-return]
+[rule.increment-decrement]
+[rule.var-naming]
+[rule.var-declaration]
+[rule.package-comments]
+[rule.range]
+[rule.receiver-naming]
+[rule.time-naming]
+[rule.unexported-return]
+[rule.indent-error-flow]
+[rule.errorf]
+[rule.empty-block]
+[rule.superfluous-else]
+[rule.unused-parameter]
+[rule.unreachable-code]
+[rule.redefines-builtin-id]
@@ -10,10 +10,7 @@ ENV REPOSITORY github.com/future-architect/vuls
 COPY . $GOPATH/src/$REPOSITORY
 RUN cd $GOPATH/src/$REPOSITORY && make install


-FROM alpine:3.11
-
-MAINTAINER hikachan sadayuki-matsuno
+FROM alpine:3.14

 ENV LOGDIR /var/log/vuls
 ENV WORKDIR /vuls
@@ -22,6 +19,7 @@ RUN apk add --no-cache \
     openssh-client \
     ca-certificates \
     git \
+    nmap \
     && mkdir -p $WORKDIR $LOGDIR

 COPY --from=builder /go/bin/vuls /usr/local/bin/

GNUmakefile: 165 lines changed
@@ -17,14 +17,13 @@ PKGS = $(shell go list ./...)
 VERSION := $(shell git describe --tags --abbrev=0)
 REVISION := $(shell git rev-parse --short HEAD)
 BUILDTIME := $(shell date "+%Y%m%d_%H%M%S")
-LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' \
-	-X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
+LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' -X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
 GO := GO111MODULE=on go
 CGO_UNABLED := CGO_ENABLED=0 go
 GO_OFF := GO111MODULE=off go


-all: build
+all: b

 build: ./cmd/vuls/main.go pretest fmt
 	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
@@ -32,22 +31,25 @@ build: ./cmd/vuls/main.go pretest fmt
 b: ./cmd/vuls/main.go
 	$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls

-install: ./cmd/vuls/main.go pretest fmt
+install: ./cmd/vuls/main.go
 	$(GO) install -ldflags "$(LDFLAGS)" ./cmd/vuls

-build-scanner: ./cmd/scanner/main.go pretest fmt
+build-scanner: ./cmd/scanner/main.go
 	$(CGO_UNABLED) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner

-install-scanner: ./cmd/scanner/main.go pretest fmt
+install-scanner: ./cmd/scanner/main.go
 	$(CGO_UNABLED) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner

 lint:
-	$(GO_OFF) get -u golang.org/x/lint/golint
-	golint $(PKGS)
+	$(GO_OFF) get -u github.com/mgechev/revive
+	revive -config ./.revive.toml -formatter plain $(PKGS)

 vet:
 	echo $(PKGS) | xargs env $(GO) vet || exit;

+golangci:
+	golangci-lint run
+
 fmt:
 	gofmt -s -w $(SRCS)

@@ -57,7 +59,7 @@ mlint:
 fmtcheck:
 	$(foreach file,$(SRCS),gofmt -s -d $(file);)

-pretest: lint vet fmtcheck
+pretest: lint vet fmtcheck golangci

 test:
 	$(GO) test -cover -v ./... || exit;
@@ -68,26 +70,28 @@ unused:
|
||||
cov:
|
||||
@ go get -v github.com/axw/gocov/gocov
|
||||
@ go get golang.org/x/tools/cmd/cover
|
||||
gocov test | gocov report
|
||||
gocov test -v ./... | gocov report
|
||||
|
||||
clean:
|
||||
echo $(PKGS) | xargs go clean || exit;
|
||||
|
||||
# trivy-to-vuls
|
||||
build-trivy-to-vuls: pretest fmt
|
||||
$(GO) build -o trivy-to-vuls contrib/trivy/cmd/*.go
|
||||
$(GO) build -a -ldflags "$(LDFLAGS)" -o trivy-to-vuls contrib/trivy/cmd/*.go
|
||||
|
||||
# future-vuls
|
||||
build-future-vuls: pretest fmt
|
||||
$(GO) build -o future-vuls contrib/future-vuls/cmd/*.go
|
||||
$(GO) build -a -ldflags "$(LDFLAGS)" -o future-vuls contrib/future-vuls/cmd/*.go
|
||||
|
||||
|
||||
# integration-test
|
||||
BASE_DIR := '${PWD}/integration/results'
|
||||
# $(shell mkdir -p ${BASE_DIR})
|
||||
NOW=$(shell date --iso-8601=seconds)
|
||||
NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
|
||||
ONE_SEC_AFTER=$(shell date -d '+1 second' --iso-8601=seconds)
|
||||
ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
|
||||
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'cargo' 'gomod' 'gobinary' 'jar' 'pom' 'nuget-lock' 'nuget-config' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
|
||||
|
||||
diff:
|
||||
# git clone git@github.com:vulsio/vulsctl.git
|
||||
@@ -99,17 +103,27 @@ diff:
|
||||
# ln -s oldvuls vuls.old
|
||||
# make int
|
||||
# (ex. test 10 times: for i in `seq 10`; do make int ARGS=-quiet ; done)
|
||||
mv ${BASE_DIR}/* /tmp
|
||||
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
|
||||
mv ${BASE_DIR} /tmp/${NOW}
|
||||
endif
|
||||
mkdir -p ${NOW_JSON_DIR}
|
||||
cp integration/data/*.json ${NOW_JSON_DIR}
|
||||
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml $(ARGS)
|
||||
sleep 1
|
||||
./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
|
||||
- cp integration/data/results/*.json ${NOW_JSON_DIR}
|
||||
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}
|
||||
|
||||
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
|
||||
cp integration/data/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml $(ARGS)
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
sleep 1
|
||||
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${ONE_SEC_AFTER}
|
||||
|
||||
$(call sed-d)
|
||||
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
|
||||
$(call count-cve)
|
||||
|
||||
diff-redis:
|
||||
# docker network create redis-nw
|
||||
@@ -122,44 +136,51 @@ diff-redis:
|
||||
# ln -s vuls vuls.new
|
||||
# ln -s oldvuls vuls.old
|
||||
# make int-redis
|
||||
mv ${BASE_DIR}/* /tmp
|
||||
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
|
||||
mv ${BASE_DIR} /tmp/${NOW}
|
||||
endif
|
||||
mkdir -p ${NOW_JSON_DIR}
|
||||
cp integration/data/*.json ${NOW_JSON_DIR}
|
||||
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml
|
||||
sleep 1
|
||||
./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
|
||||
- cp integration/data/results/*.json ${NOW_JSON_DIR}
|
||||
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${NOW}
|
||||
|
||||
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
|
||||
cp integration/data/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
sleep 1
|
||||
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}
|
||||
|
||||
$(call sed-d)
|
||||
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
|
||||
$(call count-cve)
|
||||
|
||||
diff-rdb-redis:
|
||||
mv ${BASE_DIR}/* /tmp
|
||||
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
|
||||
mv ${BASE_DIR} /tmp/${NOW}
|
||||
endif
|
||||
mkdir -p ${NOW_JSON_DIR}
|
||||
cp integration/data/*.json ${NOW_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml
|
||||
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
|
||||
cp integration/data/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml
|
||||
# remove reportedAt line
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
# remove "Type": line
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
|
||||
# remove "SQLite3Path": line
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
|
||||
diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
|
||||
for jsonfile in ${NOW_JSON_DIR}/*.json ; do \
|
||||
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
|
||||
done
|
||||
for jsonfile in ${ONE_SEC_AFTER_JSON_DIR}/*.json ; do \
|
||||
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
|
||||
done
|
||||
sleep 1
|
||||
# new vs new
|
||||
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp -f ${BASE_DIR}/current/*.json ${NOW_JSON_DIR}
|
||||
cp integration/data/results/*.json ${NOW_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}
|
||||
|
||||
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
|
||||
sleep 1
|
||||
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
|
||||
cp -f ${BASE_DIR}/current/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
|
||||
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}
|
||||
|
||||
$(call sed-d)
|
||||
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
|
||||
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
|
||||
$(call count-cve)
|
||||
|
||||
head= $(shell git rev-parse HEAD)
|
||||
prev= $(shell git rev-parse HEAD^)
|
||||
@@ -177,17 +198,51 @@ build-integration:
|
||||
make build
|
||||
mv -f ./vuls ./vuls.${prev}
|
||||
|
||||
git checkout ${branch}
|
||||
git stash apply stash@\{0\}
|
||||
# master
|
||||
git checkout master
|
||||
make build
|
||||
mv -f ./vuls ./vuls.master
|
||||
|
||||
# working tree
|
||||
git checkout ${branch}
|
||||
git stash apply stash@\{0\}
|
||||
make build
|
||||
|
||||
# update integration data
|
||||
git submodule update --remote
|
||||
|
||||
# for integration testing, vuls.new and vuls.old needed.
|
||||
# ex)
|
||||
# $ ln -s ./vuls ./vuls.new
|
||||
# $ ln -s ./vuls.${head} ./vuls.old
|
||||
# or
|
||||
# $ ln -s ./vuls.${prev} ./vuls.old
|
||||
# $ make int
|
||||
# $ make int-redis
|
||||
# then
|
||||
# $ make diff
|
||||
# $ make diff-redis
|
||||
# $ make diff-rdb-redis
|
||||
|
||||
|
||||
define sed-d
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
|
||||
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
|
||||
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
|
||||
endef
|
||||
|
||||
define count-cve
|
||||
for jsonfile in ${NOW_JSON_DIR}/*.json ; do \
|
||||
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
|
||||
done
|
||||
for jsonfile in ${ONE_SEC_AFTER_JSON_DIR}/*.json ; do \
|
||||
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
|
||||
done
|
||||
endef
|
||||
|
||||
LICENSE: 153 lines changed (the GNU Affero General Public License v3 text is replaced with the GNU General Public License v3 text)
@@ -1,21 +1,23 @@
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
@@ -24,34 +26,44 @@ them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
@@ -60,7 +72,7 @@ modification follow.
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
@@ -537,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
@@ -629,33 +631,44 @@ to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
Vuls - Vulnerability Scanner
|
||||
Copyright (C) 2016 Future Corporation , Japan.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published
|
||||
by the Free Software Foundation, either version 3 of the License, or
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
Vuls Copyright (C) 2016 Future Corporation , Japan.
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
|
||||
README.md: 23 lines changed
@@ -50,7 +50,7 @@ Vuls is a tool created to solve the problems listed above. It has the following

 [Supports major Linux/FreeBSD](https://vuls.io/docs/en/supported-os.html)

-- Alpine, Amazon Linux, CentOS, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, and Ubuntu
+- Alpine, Amazon Linux, CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, Fedora, and Ubuntu
 - FreeBSD
 - Cloud, on-premise, Running Docker Container

@@ -71,6 +71,7 @@ Vuls is a tool created to solve the problems listed above. It has the following
  - [Alpine-secdb](https://git.alpinelinux.org/cgit/alpine-secdb/)
  - [Red Hat Security Advisories](https://access.redhat.com/security/security-updates/)
  - [Debian Security Bug Tracker](https://security-tracker.debian.org/tracker/)
  - [Ubuntu CVE Tracker](https://people.canonical.com/~ubuntu-security/cve/)

 - Commands(yum, zypper, pkg-audit)
   - RHSA / ALAS / ELSA / FreeBSD-SA
@@ -79,11 +80,16 @@ Vuls is a tool created to solve the problems listed above. It has the following
 - PoC, Exploit
   - [Exploit Database](https://www.exploit-db.com/)
   - [Metasploit-Framework modules](https://www.rapid7.com/db/?q=&type=metasploit)
   - [qazbnm456/awesome-cve-poc](https://github.com/qazbnm456/awesome-cve-poc)
   - [nomi-sec/PoC-in-GitHub](https://github.com/nomi-sec/PoC-in-GitHub)

 - CERT
   - [US-CERT](https://www.us-cert.gov/ncas/alerts)
   - [JPCERT](http://www.jpcert.or.jp/at/2019.html)

 - CISA(Cybersecurity & Infrastructure Security Agency)
   - [Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog)

 - Libraries
   - [Node.js Security Working Group](https://github.com/nodejs/security-wg)
   - [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
@@ -100,15 +106,15 @@ Vuls is a tool created to solve the problems listed above. It has the following

 - Scan without root privilege, no dependencies
 - Almost no load on the scan target server
-- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
+- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)

 [Fast Root Scan](https://vuls.io/docs/en/architecture-fast-root-scan.html)

 - Scan with root privilege
 - Almost no load on the scan target server
-- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Oracle Linux, and RedHat)
+- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Alma Linux, Rocky Linux, Oracle Linux, Fedora, and RedHat)
 - Detect processes which updated before but not restarting yet using checkrestart of debian-goodies (Debian and Ubuntu)
-- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
+- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)

 ### [Remote, Local scan mode, Server mode](https://vuls.io/docs/en/architecture-remote-local.html)

@@ -183,11 +189,14 @@ see [vulsdoc](https://vuls.io/docs/en/how-to-contribute.html)

 ----

-## Stargazers over time
+## Sponsors

-[](https://starcharts.herokuapp.com/future-architect/vuls)
+| | |
+| ------------- | ------------- |
+| <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=vuls"><img src="img/sponsor/tines.png" align="left" width="600px" ></a> | Tines is no-code automation for security teams. Build powerful, reliable workflows without a development team. |
+| <a href="https://www.sakura.ad.jp/"><img src="https://vuls.io/img/icons/sakura.svg" align="left" width="600px" ></a> | SAKURA internet Inc. is an Internet company founded in 1996. We provide cloud computing services such as "Sakura's Shared Server", "Sakura's VPS", and "Sakura's Cloud" to meet the needs of a wide range of customers, from individuals and corporations to the education and public sectors, using its own data centers in Japan. Based on the philosophy of "changing what you want to do into what you can do," we offer DX solutions for all fields. |

 -----;
 ----

 ## License

SECURITY.md (new file): 9 lines
@@ -0,0 +1,9 @@
+# Security Policy
+
+## Supported Versions
+
+Only the latest version is supported.
+
+## Reporting a Vulnerability
+
+Email kotakanbe@gmail.com
@@ -41,17 +41,19 @@ type Config struct {
 	Gost       GostConf       `json:"gost,omitempty"`
 	Exploit    ExploitConf    `json:"exploit,omitempty"`
 	Metasploit MetasploitConf `json:"metasploit,omitempty"`
+	KEVuln     KEVulnConf     `json:"kevuln,omitempty"`

-	Slack    SlackConf    `json:"-"`
-	EMail    SMTPConf     `json:"-"`
-	HTTP     HTTPConf     `json:"-"`
-	Syslog   SyslogConf   `json:"-"`
-	AWS      AWSConf      `json:"-"`
-	Azure    AzureConf    `json:"-"`
-	ChatWork ChatWorkConf `json:"-"`
-	Telegram TelegramConf `json:"-"`
-	WpScan   WpScanConf   `json:"-"`
-	Saas     SaasConf     `json:"-"`
+	Slack      SlackConf      `json:"-"`
+	EMail      SMTPConf       `json:"-"`
+	HTTP       HTTPConf       `json:"-"`
+	Syslog     SyslogConf     `json:"-"`
+	AWS        AWSConf        `json:"-"`
+	Azure      AzureConf      `json:"-"`
+	ChatWork   ChatWorkConf   `json:"-"`
+	GoogleChat GoogleChatConf `json:"-"`
+	Telegram   TelegramConf   `json:"-"`
+	WpScan     WpScanConf     `json:"-"`
+	Saas       SaasConf       `json:"-"`

 	ReportOpts
 }
@@ -68,20 +70,17 @@ type ScanOpts struct {
|
||||
|
||||
// ReportOpts is options for report
|
||||
type ReportOpts struct {
|
||||
// refactored
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
DiffPlus bool `json:"diffPlus,omitempty"`
|
||||
DiffMinus bool `json:"diffMinus,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
|
||||
//TODO move to GitHubConf
|
||||
IgnoreGitHubDismissed bool `json:"ignore_git_hub_dismissed,omitempty"`
|
||||
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
|
||||
ConfidenceScoreOver int `json:"confidenceScoreOver,omitempty"`
|
||||
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
|
||||
NoProgress bool `json:"noProgress,omitempty"`
|
||||
RefreshCve bool `json:"refreshCve,omitempty"`
|
||||
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
|
||||
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
|
||||
DiffPlus bool `json:"diffPlus,omitempty"`
|
||||
DiffMinus bool `json:"diffMinus,omitempty"`
|
||||
Diff bool `json:"diff,omitempty"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
}
|
||||
|
||||
// ValidateOnConfigtest validates
|
||||
@@ -109,6 +108,16 @@ func (c Config) ValidateOnScan() bool {
 	if _, err := govalidator.ValidateStruct(c); err != nil {
 		errs = append(errs, err)
 	}

+	for _, server := range c.Servers {
+		if !server.Module.IsScanPort() {
+			continue
+		}
+		if es := server.PortScan.Validate(); 0 < len(es) {
+			errs = append(errs, es...)
+		}
+	}
+
 	for _, err := range errs {
 		logging.Log.Error(err)
 	}
@@ -150,6 +159,7 @@ func (c *Config) ValidateOnReport() bool {
 		&c.EMail,
 		&c.Slack,
 		&c.ChatWork,
+		&c.GoogleChat,
 		&c.Telegram,
 		&c.Syslog,
 		&c.HTTP,
@@ -167,6 +177,7 @@ func (c *Config) ValidateOnReport() bool {
 		&Conf.Gost,
 		&Conf.Exploit,
 		&Conf.Metasploit,
+		&Conf.KEVuln,
 	} {
 		if err := cnf.Validate(); err != nil {
 			errs = append(errs, xerrors.Errorf("Failed to validate %s: %+v", cnf.GetName(), err))
@@ -221,16 +232,18 @@ type ServerInfo struct {
|
||||
GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
|
||||
UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
|
||||
Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
|
||||
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, RHEL, Amazon
|
||||
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, Alma, Rocky, RHEL, Amazon
|
||||
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
|
||||
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
|
||||
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
|
||||
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
|
||||
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
|
||||
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
|
||||
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
|
||||
IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`
|
||||
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
|
||||
PortScan *PortScanConf `toml:"portscan,omitempty" json:"portscan,omitempty"`
|
||||
|
||||
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
|
||||
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
|
||||
IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`
|
||||
|
||||
// internal use
|
||||
LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
|
||||
@@ -262,7 +275,8 @@ func (cnf WordPressConf) IsZero() bool {
|
||||
|
||||
// GitHubConf is used for GitHub Security Alerts
|
||||
type GitHubConf struct {
|
||||
Token string `json:"-"`
|
||||
Token string `json:"-"`
|
||||
IgnoreGitHubDismissed bool `json:"ignoreGitHubDismissed,omitempty"`
|
||||
}
|
||||
|
||||
// GetServerName returns ServerName if this serverInfo is about host.
|
||||
@@ -286,14 +300,17 @@ func (l Distro) String() string {

 // MajorVersion returns Major version
 func (l Distro) MajorVersion() (int, error) {
-	if l.Family == constant.Amazon {
-		if isAmazonLinux1(l.Release) {
-			return 1, nil
-		}
-		return 2, nil
-	}
-	if 0 < len(l.Release) {
-		return strconv.Atoi(strings.Split(l.Release, ".")[0])
-	}
+	switch l.Family {
+	case constant.Amazon:
+		return strconv.Atoi(getAmazonLinuxVersion(l.Release))
+	case constant.CentOS:
+		if 0 < len(l.Release) {
+			return strconv.Atoi(strings.Split(strings.TrimPrefix(l.Release, "stream"), ".")[0])
+		}
+	default:
+		if 0 < len(l.Release) {
+			return strconv.Atoi(strings.Split(l.Release, ".")[0])
+		}
+	}
 	return 0, xerrors.New("Release is empty")
 }

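The CentOS branch above trims a `stream` prefix before parsing the major number, so a CentOS Stream release resolves to its numeric major version. Below is a minimal illustrative sketch, not part of this diff; the `"centos"` family string and the `stream8` release value are assumptions about how the scanner reports a Stream host.

```go
package config

import "testing"

// Hypothetical test, not present in the repository: it only illustrates the
// TrimPrefix behavior added to Distro.MajorVersion in this change.
func TestMajorVersionCentOSStreamSketch(t *testing.T) {
	// "centos" is assumed to equal constant.CentOS; "stream8" is an illustrative release value.
	d := Distro{Family: "centos", Release: "stream8"}
	got, err := d.MajorVersion()
	if err != nil {
		t.Fatalf("MajorVersion() returned error: %v", err)
	}
	if got != 8 {
		t.Errorf("MajorVersion() = %d, want 8", got)
	}
}
```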
@@ -70,6 +70,13 @@ func TestDistro_MajorVersion(t *testing.T) {
 		in  Distro
 		out int
 	}{
+		{
+			in: Distro{
+				Family:  Amazon,
+				Release: "2022 (Amazon Linux)",
+			},
+			out: 2022,
+		},
 		{
 			in: Distro{
 				Family: Amazon,

config/googlechatconf.go (new file): 32 lines
@@ -0,0 +1,32 @@
+package config
+
+import (
+	"github.com/asaskevich/govalidator"
+	"golang.org/x/xerrors"
+)
+
+// GoogleChatConf is GoogleChat config
+type GoogleChatConf struct {
+	WebHookURL       string `valid:"url" json:"-" toml:"webHookURL,omitempty"`
+	SkipIfNoCve      bool   `valid:"type(bool)" json:"-" toml:"skipIfNoCve"`
+	ServerNameRegexp string `valid:"type(string)" json:"-" toml:"serverNameRegexp,omitempty"`
+	Enabled          bool   `valid:"type(bool)" json:"-" toml:"-"`
+}
+
+// Validate validates configuration
+func (c *GoogleChatConf) Validate() (errs []error) {
+	if !c.Enabled {
+		return
+	}
+	if len(c.WebHookURL) == 0 {
+		errs = append(errs, xerrors.New("googleChatConf.webHookURL must not be empty"))
+	}
+	if !govalidator.IsRegex(c.ServerNameRegexp) {
+		errs = append(errs, xerrors.New("googleChatConf.serverNameRegexp must be regex"))
+	}
+	_, err := govalidator.ValidateStruct(c)
+	if err != nil {
+		errs = append(errs, err)
+	}
+	return
+}
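For reference, a minimal usage sketch of the new GoogleChatConf validation, not part of the diff. It assumes the package import path github.com/future-architect/vuls/config used elsewhere in this compare; the webhook URL and regexp values are placeholders.

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	// Placeholder values: only illustrates how Validate reports configuration errors.
	c := config.GoogleChatConf{
		Enabled:          true,
		WebHookURL:       "https://chat.googleapis.com/v1/spaces/XXXX/messages?key=YYYY",
		ServerNameRegexp: `^web-.*`, // only servers matching this pattern would be reported
	}
	if errs := c.Validate(); len(errs) > 0 {
		for _, err := range errs {
			fmt.Println(err)
		}
		return
	}
	fmt.Println("googlechat config is valid")
}
```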
@@ -7,6 +7,6 @@ type JSONLoader struct {
 }

 // Load load the configuration JSON file specified by path arg.
-func (c JSONLoader) Load(path, sudoPass, keyPass string) (err error) {
+func (c JSONLoader) Load(_, _, _ string) (err error) {
 	return xerrors.New("Not implement yet")
 }

config/os.go: 61 lines changed
@@ -39,14 +39,11 @@ func (e EOL) IsExtendedSuppportEnded(now time.Time) bool {
 func GetEOL(family, release string) (eol EOL, found bool) {
 	switch family {
 	case constant.Amazon:
-		rel := "2"
-		if isAmazonLinux1(release) {
-			rel = "1"
-		}
 		eol, found = map[string]EOL{
-			"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
-			"2": {},
-		}[rel]
+			"1":    {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
+			"2":    {},
+			"2022": {},
+		}[getAmazonLinuxVersion(release)]
 	case constant.RedHat:
 		// https://access.redhat.com/support/policy/updates/errata
 		eol, found = map[string]EOL{
@@ -66,14 +63,22 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 		}[major(release)]
 	case constant.CentOS:
 		// https://en.wikipedia.org/wiki/CentOS#End-of-support_schedule
-		// TODO Stream
 		eol, found = map[string]EOL{
-			"3": {Ended: true},
-			"4": {Ended: true},
-			"5": {Ended: true},
-			"6": {Ended: true},
-			"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
-			"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
+			"3":       {Ended: true},
+			"4":       {Ended: true},
+			"5":       {Ended: true},
+			"6":       {Ended: true},
+			"7":       {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
+			"8":       {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
+			"stream8": {StandardSupportUntil: time.Date(2024, 5, 31, 23, 59, 59, 0, time.UTC)},
 		}[major(release)]
+	case constant.Alma:
+		eol, found = map[string]EOL{
+			"8": {StandardSupportUntil: time.Date(2029, 12, 31, 23, 59, 59, 0, time.UTC)},
+		}[major(release)]
+	case constant.Rocky:
+		eol, found = map[string]EOL{
+			"8": {StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC)},
+		}[major(release)]
 	case constant.Oracle:
 		eol, found = map[string]EOL{
@@ -102,6 +107,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 			"8":  {Ended: true},
 			"9":  {StandardSupportUntil: time.Date(2022, 6, 30, 23, 59, 59, 0, time.UTC)},
 			"10": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
+			"11": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
 		}[major(release)]
 	case constant.Raspbian:
 		// Not found
@@ -131,8 +137,11 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 		"20.04": {
 			StandardSupportUntil: time.Date(2025, 4, 1, 23, 59, 59, 0, time.UTC),
 		},
+		"20.10": {
+			StandardSupportUntil: time.Date(2021, 7, 22, 23, 59, 59, 0, time.UTC),
+		},
 		"21.04": {
-			StandardSupportUntil: time.Date(2022, 1, 1, 23, 59, 59, 0, time.UTC),
+			StandardSupportUntil: time.Date(2022, 1, 22, 23, 59, 59, 0, time.UTC),
 		},
 		"21.10": {
 			StandardSupportUntil: time.Date(2022, 7, 1, 23, 59, 59, 0, time.UTC),
@@ -142,7 +151,7 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 	//TODO
 	case constant.Alpine:
 		// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/alpine/alpine.go#L19
-		// https://wiki.alpinelinux.org/wiki/Alpine_Linux:Releases
+		// https://alpinelinux.org/releases/
 		eol, found = map[string]EOL{
 			"2.0": {Ended: true},
 			"2.1": {Ended: true},
@@ -166,6 +175,8 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 			"3.11": {StandardSupportUntil: time.Date(2021, 11, 1, 23, 59, 59, 0, time.UTC)},
 			"3.12": {StandardSupportUntil: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC)},
 			"3.13": {StandardSupportUntil: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC)},
+			"3.14": {StandardSupportUntil: time.Date(2023, 5, 1, 23, 59, 59, 0, time.UTC)},
+			"3.15": {StandardSupportUntil: time.Date(2023, 11, 1, 23, 59, 59, 0, time.UTC)},
 		}[majorDotMinor(release)]
 	case constant.FreeBSD:
 		// https://www.freebsd.org/security/
@@ -176,6 +187,16 @@ func GetEOL(family, release string) (eol EOL, found bool) {
 			"10": {Ended: true},
 			"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
 			"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
+			"13": {StandardSupportUntil: time.Date(2026, 1, 31, 23, 59, 59, 0, time.UTC)},
 		}[major(release)]
+	case constant.Fedora:
+		// https://docs.fedoraproject.org/en-US/releases/eol/
+		// https://endoflife.date/fedora
+		eol, found = map[string]EOL{
+			"32": {StandardSupportUntil: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC)},
+			"33": {StandardSupportUntil: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC)},
+			"34": {StandardSupportUntil: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC)},
+			"35": {StandardSupportUntil: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC)},
+		}[major(release)]
 	}
 	return
@@ -193,6 +214,10 @@ func majorDotMinor(osVer string) (majorDotMinor string) {
 	return fmt.Sprintf("%s.%s", ss[0], ss[1])
 }

-func isAmazonLinux1(osRelease string) bool {
-	return len(strings.Fields(osRelease)) == 1
+func getAmazonLinuxVersion(osRelease string) string {
+	ss := strings.Fields(osRelease)
+	if len(ss) == 1 {
+		return "1"
+	}
+	return ss[0]
 }
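getAmazonLinuxVersion replaces isAmazonLinux1: a one-field release string is still treated as Amazon Linux 1, and otherwise the first field ("2", "2022", ...) is used as the version key. A small hypothetical test sketch of that behavior follows; the release strings other than "2022 (Amazon Linux)" are illustrative, not taken from this diff.

```go
package config

import "testing"

// Hypothetical test, not present in the repository: it exercises the
// getAmazonLinuxVersion helper introduced above.
func TestGetAmazonLinuxVersionSketch(t *testing.T) {
	cases := []struct {
		release string
		want    string
	}{
		{"2017.09", "1"},                // single field: treated as Amazon Linux 1
		{"2 (Karoo)", "2"},              // illustrative Amazon Linux 2 release string
		{"2022 (Amazon Linux)", "2022"}, // matches the EOL map key and the test case added in config_test.go
	}
	for _, c := range cases {
		if got := getAmazonLinuxVersion(c.release); got != c.want {
			t.Errorf("getAmazonLinuxVersion(%q) = %q, want %q", c.release, got, c.want)
		}
	}
}
```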
@@ -45,6 +45,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "amazon linux 2022 supported",
|
||||
fields: fields{family: Amazon, release: "2022 (Amazon Linux)"},
|
||||
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
//RHEL
|
||||
{
|
||||
name: "RHEL7 supported",
|
||||
@@ -111,6 +119,56 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// Alma
|
||||
{
|
||||
name: "Alma Linux 8 supported",
|
||||
fields: fields{family: Alma, release: "8"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alma Linux 8 EOL",
|
||||
fields: fields{family: Alma, release: "8"},
|
||||
now: time.Date(2029, 2, 1, 0, 0, 0, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alma Linux 9 Not Found",
|
||||
fields: fields{family: Alma, release: "9"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
// Rocky
|
||||
{
|
||||
name: "Rocky Linux 8 supported",
|
||||
fields: fields{family: Rocky, release: "8"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Rocky Linux 8 EOL",
|
||||
fields: fields{family: Rocky, release: "8"},
|
||||
now: time.Date(2026, 2, 1, 0, 0, 0, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Rocky Linux 9 Not Found",
|
||||
fields: fields{family: Rocky, release: "9"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//Oracle
|
||||
{
|
||||
name: "Oracle Linux 7 supported",
|
||||
@@ -193,6 +251,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 20.10 supported",
|
||||
fields: fields{family: Ubuntu, release: "20.10"},
|
||||
now: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
found: true,
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
},
|
||||
{
|
||||
name: "Ubuntu 21.04 supported",
|
||||
fields: fields{family: Ubuntu, release: "21.04"},
|
||||
@@ -232,6 +298,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Debian 12 is not supported yet",
|
||||
fields: fields{family: Debian, release: "12"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
},
|
||||
//alpine
|
||||
@@ -268,9 +342,25 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.14 not found",
|
||||
name: "Alpine 3.14 supported",
|
||||
fields: fields{family: Alpine, release: "3.14"},
|
||||
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
|
||||
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.15 supported",
|
||||
fields: fields{family: Alpine, release: "3.15"},
|
||||
now: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Alpine 3.16 not found",
|
||||
fields: fields{family: Alpine, release: "3.16"},
|
||||
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: false,
|
||||
@@ -300,6 +390,14 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 13 supported",
|
||||
fields: fields{family: FreeBSD, release: "13"},
|
||||
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "freebsd 10 eol",
|
||||
fields: fields{family: FreeBSD, release: "10"},
|
||||
@@ -308,6 +406,71 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
// Fedora
|
||||
{
|
||||
name: "Fedora 32 supported",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 25, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 32 eol on 2021-5-25",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 26, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 33 supported",
|
||||
fields: fields{family: Fedora, release: "33"},
|
||||
now: time.Date(2021, 11, 30, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 33 eol on 2021-5-26",
|
||||
fields: fields{family: Fedora, release: "32"},
|
||||
now: time.Date(2021, 5, 27, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 34 supported",
|
||||
fields: fields{family: Fedora, release: "34"},
|
||||
now: time.Date(2022, 5, 17, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 32 eol on 2022-5-17",
|
||||
fields: fields{family: Fedora, release: "34"},
|
||||
now: time.Date(2022, 5, 18, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 35 supported",
|
||||
fields: fields{family: Fedora, release: "35"},
|
||||
now: time.Date(2022, 12, 7, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: false,
|
||||
extEnded: false,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
name: "Fedora 35 eol on 2022-12-7",
|
||||
fields: fields{family: Fedora, release: "35"},
|
||||
now: time.Date(2022, 12, 8, 23, 59, 59, 0, time.UTC),
|
||||
stdEnded: true,
|
||||
extEnded: true,
|
||||
found: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
||||
config/portscan.go (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// PortScanConf is the setting for using an external port scanner
|
||||
type PortScanConf struct {
|
||||
IsUseExternalScanner bool `toml:"-" json:"-"`
|
||||
|
||||
// Path to external scanner
|
||||
ScannerBinPath string `toml:"scannerBinPath,omitempty" json:"scannerBinPath,omitempty"`
|
||||
|
||||
// set whether the user has privileges to run the scanner (root or the required capabilities)
|
||||
HasPrivileged bool `toml:"hasPrivileged,omitempty" json:"hasPrivileged,omitempty"`
|
||||
|
||||
// set the ScanTechniques for ScannerBinPath
|
||||
ScanTechniques []string `toml:"scanTechniques,omitempty" json:"scanTechniques,omitempty"`
|
||||
|
||||
// set the FIREWALL/IDS EVASION AND SPOOFING option (use the given source port number)
|
||||
SourcePort string `toml:"sourcePort,omitempty" json:"sourcePort,omitempty"`
|
||||
}
|
||||
|
||||
// ScanTechnique represents the supported scan techniques as an enum.
|
||||
type ScanTechnique int
|
||||
|
||||
const (
|
||||
// NotSupportTechnique is a ScanTechnique that is currently not supported.
|
||||
NotSupportTechnique ScanTechnique = iota
|
||||
// TCPSYN is SYN scan
|
||||
TCPSYN
|
||||
// TCPConnect is TCP connect scan
|
||||
TCPConnect
|
||||
// TCPACK is ACK scan
|
||||
TCPACK
|
||||
// TCPWindow is Window scan
|
||||
TCPWindow
|
||||
// TCPMaimon is Maimon scan
|
||||
TCPMaimon
|
||||
// TCPNull is Null scan
|
||||
TCPNull
|
||||
// TCPFIN is FIN scan
|
||||
TCPFIN
|
||||
// TCPXmas is Xmas scan
|
||||
TCPXmas
|
||||
)
|
||||
|
||||
var scanTechniqueMap = map[ScanTechnique]string{
|
||||
TCPSYN: "sS",
|
||||
TCPConnect: "sT",
|
||||
TCPACK: "sA",
|
||||
TCPWindow: "sW",
|
||||
TCPMaimon: "sM",
|
||||
TCPNull: "sN",
|
||||
TCPFIN: "sF",
|
||||
TCPXmas: "sX",
|
||||
}
|
||||
|
||||
func (s ScanTechnique) String() string {
|
||||
switch s {
|
||||
case TCPSYN:
|
||||
return "TCPSYN"
|
||||
case TCPConnect:
|
||||
return "TCPConnect"
|
||||
case TCPACK:
|
||||
return "TCPACK"
|
||||
case TCPWindow:
|
||||
return "TCPWindow"
|
||||
case TCPMaimon:
|
||||
return "TCPMaimon"
|
||||
case TCPNull:
|
||||
return "TCPNull"
|
||||
case TCPFIN:
|
||||
return "TCPFIN"
|
||||
case TCPXmas:
|
||||
return "TCPXmas"
|
||||
default:
|
||||
return "NotSupportTechnique"
|
||||
}
|
||||
}
|
||||
|
||||
// GetScanTechniques converts the ScanTechniques loaded from config.toml to []ScanTechnique.
|
||||
func (c *PortScanConf) GetScanTechniques() []ScanTechnique {
|
||||
if len(c.ScanTechniques) == 0 {
|
||||
return []ScanTechnique{}
|
||||
}
|
||||
|
||||
scanTechniques := []ScanTechnique{}
|
||||
for _, technique := range c.ScanTechniques {
|
||||
findScanTechniqueFlag := false
|
||||
for key, value := range scanTechniqueMap {
|
||||
if strings.EqualFold(value, technique) {
|
||||
scanTechniques = append(scanTechniques, key)
|
||||
findScanTechniqueFlag = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !findScanTechniqueFlag {
|
||||
scanTechniques = append(scanTechniques, NotSupportTechnique)
|
||||
}
|
||||
}
|
||||
|
||||
if len(scanTechniques) == 0 {
|
||||
return []ScanTechnique{NotSupportTechnique}
|
||||
}
|
||||
return scanTechniques
|
||||
}
|
||||
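A minimal usage sketch of GetScanTechniques above (a new example, not part of this diff); it assumes the repository's config package import path and uses the "sS"/"sT" flag strings from scanTechniqueMap:

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	// "sS" and "sT" map to TCPSYN and TCPConnect in scanTechniqueMap.
	c := config.PortScanConf{ScanTechniques: []string{"sS", "sT"}}
	fmt.Println(c.GetScanTechniques()) // [TCPSYN TCPConnect]

	// Unknown flags are reported as NotSupportTechnique, which Validate rejects.
	c = config.PortScanConf{ScanTechniques: []string{"sU"}}
	fmt.Println(c.GetScanTechniques()) // [NotSupportTechnique]
}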
|
||||
// Validate validates configuration
|
||||
func (c *PortScanConf) Validate() (errs []error) {
|
||||
if !c.IsUseExternalScanner {
|
||||
if c.IsZero() {
|
||||
return
|
||||
}
|
||||
errs = append(errs, xerrors.New("To enable the PortScan option, ScannerBinPath must be set."))
|
||||
}
|
||||
|
||||
if _, err := os.Stat(c.ScannerBinPath); err != nil {
|
||||
errs = append(errs, xerrors.Errorf(
|
||||
"scanner is not found. ScannerBinPath: %s not exists", c.ScannerBinPath))
|
||||
}
|
||||
|
||||
scanTechniques := c.GetScanTechniques()
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique == NotSupportTechnique {
|
||||
errs = append(errs, xerrors.New("There is an unsupported option in ScanTechniques."))
|
||||
}
|
||||
}
|
||||
|
||||
// It does not currently support multiple ScanTechniques.
|
||||
// But if it supports UDP scanning, it will need to accept multiple ScanTechniques.
|
||||
if len(scanTechniques) > 1 {
|
||||
errs = append(errs, xerrors.New("Currently multiple ScanTechniques are not supported."))
|
||||
}
|
||||
|
||||
if c.HasPrivileged {
|
||||
if os.Geteuid() != 0 {
|
||||
output, err := exec.Command("getcap", c.ScannerBinPath).Output()
|
||||
if err != nil {
|
||||
errs = append(errs, xerrors.Errorf("Failed to check capability of %s. error message: %w", c.ScannerBinPath, err))
|
||||
} else {
|
||||
parseOutput := strings.SplitN(string(output), "=", 2)
|
||||
if len(parseOutput) != 2 {
|
||||
errs = append(errs, xerrors.Errorf("Failed to parse getcap outputs. please execute this command: `$ getcap %s`. If the following string (`/usr/bin/nmap = ... `) is not displayed, you need to set the capability with the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", c.ScannerBinPath, c.ScannerBinPath))
|
||||
} else {
|
||||
parseCapability := strings.Split(strings.TrimSpace(parseOutput[1]), "+")
|
||||
capabilities := strings.Split(parseCapability[0], ",")
|
||||
for _, needCap := range []string{"cap_net_bind_service", "cap_net_admin", "cap_net_raw"} {
|
||||
existCapFlag := false
|
||||
for _, cap := range capabilities {
|
||||
if needCap == cap {
|
||||
existCapFlag = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if existCapFlag {
|
||||
continue
|
||||
}
|
||||
|
||||
errs = append(errs, xerrors.Errorf("Not enough capability to execute. needs: ['cap_net_bind_service', 'cap_net_admin', 'cap_net_raw'], actual: %s. To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", capabilities, c.ScannerBinPath))
|
||||
break
|
||||
}
|
||||
|
||||
if parseCapability[1] != "eip" {
|
||||
errs = append(errs, xerrors.Errorf("Capability(`cap_net_bind_service,cap_net_admin,cap_net_raw`) must belong to the following capability set(need: eip, actual: %s). To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", parseCapability[1], c.ScannerBinPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !c.HasPrivileged {
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique != TCPConnect && scanTechnique != NotSupportTechnique {
|
||||
errs = append(errs, xerrors.New("If not privileged, only TCPConnect Scan(-sT) can be used."))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.SourcePort != "" {
|
||||
for _, scanTechnique := range scanTechniques {
|
||||
if scanTechnique == TCPConnect {
|
||||
errs = append(errs, xerrors.New("SourcePort Option(-g/--source-port) is incompatible with the default TCPConnect Scan(-sT)."))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
portNumber, err := strconv.Atoi(c.SourcePort)
|
||||
if err != nil {
|
||||
errs = append(errs, xerrors.Errorf("SourcePort conversion failed. %w", err))
|
||||
} else {
|
||||
if portNumber < 0 || 65535 < portNumber {
|
||||
errs = append(errs, xerrors.Errorf("SourcePort(%s) must be between 0 and 65535.", c.SourcePort))
|
||||
}
|
||||
|
||||
if portNumber == 0 {
|
||||
errs = append(errs, xerrors.New("SourcePort(0) may not work on all systems."))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_, err := govalidator.ValidateStruct(c)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// IsZero returns whether this struct is not specified in config.toml
|
||||
func (c PortScanConf) IsZero() bool {
|
||||
return c.ScannerBinPath == "" && !c.HasPrivileged && len(c.ScanTechniques) == 0 && c.SourcePort == ""
|
||||
}
|
||||
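A hedged sketch of driving Validate above (not part of this diff); the /usr/bin/nmap path is illustrative and must exist, either running as root or carrying the cap_net_raw,cap_net_admin,cap_net_bind_service+eip capability set, for the checks to pass:

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.PortScanConf{
		IsUseExternalScanner: true,
		ScannerBinPath:       "/usr/bin/nmap", // illustrative; Validate stats this path
		HasPrivileged:        true,            // triggers the getcap capability check when not running as root
		ScanTechniques:       []string{"sS"},
	}
	for _, err := range c.Validate() {
		fmt.Println(err)
	}
}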
config/portscan_test.go (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPortScanConf_getScanTechniques(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
techniques []string
|
||||
want []ScanTechnique
|
||||
}{
|
||||
{
|
||||
name: "nil",
|
||||
techniques: []string{},
|
||||
want: []ScanTechnique{},
|
||||
},
|
||||
{
|
||||
name: "single",
|
||||
techniques: []string{"sS"},
|
||||
want: []ScanTechnique{TCPSYN},
|
||||
},
|
||||
{
|
||||
name: "multiple",
|
||||
techniques: []string{"sS", "sT"},
|
||||
want: []ScanTechnique{TCPSYN, TCPConnect},
|
||||
},
|
||||
{
|
||||
name: "unknown",
|
||||
techniques: []string{"sU"},
|
||||
want: []ScanTechnique{NotSupportTechnique},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
c := PortScanConf{ScanTechniques: tt.techniques}
|
||||
if got := c.GetScanTechniques(); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("PortScanConf.getScanTechniques() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPortScanConf_IsZero(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
conf PortScanConf
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "not zero",
|
||||
conf: PortScanConf{ScannerBinPath: "/usr/bin/nmap"},
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "zero",
|
||||
conf: PortScanConf{},
|
||||
want: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.conf.IsZero(); got != tt.want {
|
||||
t.Errorf("PortScanConf.IsZero() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -84,7 +84,7 @@ func (s ScanMode) String() string {
|
||||
return ss + " mode"
|
||||
}
|
||||
|
||||
func setScanMode(server *ServerInfo, d ServerInfo) error {
|
||||
func setScanMode(server *ServerInfo) error {
|
||||
if len(server.ScanMode) == 0 {
|
||||
server.ScanMode = Conf.Default.ScanMode
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ type TOMLLoader struct {
|
||||
}
|
||||
|
||||
// Load loads the configuration TOML file specified by the path argument.
|
||||
func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
func (c TOMLLoader) Load(pathToToml, _ string) error {
|
||||
// util.Log.Infof("Loading config: %s", pathToToml)
|
||||
if _, err := toml.DecodeFile(pathToToml, &Conf); err != nil {
|
||||
return err
|
||||
@@ -27,6 +27,7 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
&Conf.Gost,
|
||||
&Conf.Exploit,
|
||||
&Conf.Metasploit,
|
||||
&Conf.KEVuln,
|
||||
} {
|
||||
cnf.Init()
|
||||
}
|
||||
@@ -34,11 +35,11 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
index := 0
|
||||
for name, server := range Conf.Servers {
|
||||
server.ServerName = name
|
||||
if err := setDefaultIfEmpty(&server, Conf.Default); err != nil {
|
||||
if err := setDefaultIfEmpty(&server); err != nil {
|
||||
return xerrors.Errorf("Failed to set default value to config. server: %s, err: %w", name, err)
|
||||
}
|
||||
|
||||
if err := setScanMode(&server, Conf.Default); err != nil {
|
||||
if err := setScanMode(&server); err != nil {
|
||||
return xerrors.Errorf("Failed to set ScanMode: %w", err)
|
||||
}
|
||||
|
||||
@@ -125,6 +126,10 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
}
|
||||
}
|
||||
|
||||
if server.PortScan.ScannerBinPath != "" {
|
||||
server.PortScan.IsUseExternalScanner = true
|
||||
}
|
||||
|
||||
server.LogMsgAnsiColor = Colors[index%len(Colors)]
|
||||
index++
|
||||
|
||||
@@ -133,7 +138,7 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
|
||||
func setDefaultIfEmpty(server *ServerInfo) error {
|
||||
if server.Type != constant.ServerTypePseudo {
|
||||
if len(server.Host) == 0 {
|
||||
return xerrors.Errorf("server.host is empty")
|
||||
@@ -203,6 +208,13 @@ func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
|
||||
}
|
||||
}
|
||||
|
||||
if server.PortScan == nil {
|
||||
server.PortScan = Conf.Default.PortScan
|
||||
if server.PortScan == nil {
|
||||
server.PortScan = &PortScanConf{}
|
||||
}
|
||||
}
|
||||
|
||||
if len(server.IgnoredJSONKeys) == 0 {
|
||||
server.IgnoredJSONKeys = Conf.Default.IgnoredJSONKeys
|
||||
}
|
||||
|
||||
@@ -248,7 +248,7 @@ func (cnf *GostConf) Init() {
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// MetasploitConf is gost go-metasploitdb
|
||||
// MetasploitConf is go-msfdb config
|
||||
type MetasploitConf struct {
|
||||
VulnDict
|
||||
}
|
||||
@@ -274,3 +274,30 @@ func (cnf *MetasploitConf) Init() {
|
||||
cnf.setDefault("go-msfdb.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
|
||||
// KEVulnConf is go-kev config
|
||||
type KEVulnConf struct {
|
||||
VulnDict
|
||||
}
|
||||
|
||||
const kevulnDBType = "KEVULN_TYPE"
|
||||
const kevulnDBURL = "KEVULN_URL"
|
||||
const kevulnDBPATH = "KEVULN_SQLITE3_PATH"
|
||||
|
||||
// Init set options with the following priority.
|
||||
// 1. Environment variable
|
||||
// 2. config.toml
|
||||
func (cnf *KEVulnConf) Init() {
|
||||
cnf.Name = "kevuln"
|
||||
if os.Getenv(kevulnDBType) != "" {
|
||||
cnf.Type = os.Getenv(kevulnDBType)
|
||||
}
|
||||
if os.Getenv(kevulnDBURL) != "" {
|
||||
cnf.URL = os.Getenv(kevulnDBURL)
|
||||
}
|
||||
if os.Getenv(kevulnDBPATH) != "" {
|
||||
cnf.SQLite3Path = os.Getenv(kevulnDBPATH)
|
||||
}
|
||||
cnf.setDefault("go-kev.sqlite3")
|
||||
cnf.DebugSQL = Conf.DebugSQL
|
||||
}
|
||||
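A small sketch of the priority described in the Init comment above (environment variables over config.toml); the values are placeholders, not part of this diff:

package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	os.Setenv("KEVULN_TYPE", "http")
	os.Setenv("KEVULN_URL", "http://127.0.0.1:1328") // placeholder go-kev server address
	cnf := config.KEVulnConf{}
	cnf.Init()
	fmt.Println(cnf.Type, cnf.URL) // the environment values win over config.toml
}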
|
||||
@@ -17,6 +17,12 @@ const (
|
||||
// CentOS is
|
||||
CentOS = "centos"
|
||||
|
||||
// Alma is
|
||||
Alma = "alma"
|
||||
|
||||
// Rocky is
|
||||
Rocky = "rocky"
|
||||
|
||||
// Fedora is
|
||||
// Fedora = "fedora"
|
||||
|
||||
@@ -55,4 +61,10 @@ const (
|
||||
|
||||
// ServerTypePseudo is used for ServerInfo.Type, r.Family
|
||||
ServerTypePseudo = "pseudo"
|
||||
|
||||
// DeepSecurity is
|
||||
DeepSecurity = "deepsecurity"
|
||||
|
||||
//Fedora is
|
||||
Fedora = "fedora"
|
||||
)
|
||||
|
||||
@@ -81,6 +81,14 @@ func main() {
|
||||
return
|
||||
},
|
||||
}
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("future-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&serverUUID, "uuid", "", "server uuid. ENV: VULS_SERVER_UUID")
|
||||
cmdFvulsUploader.PersistentFlags().StringVar(&configFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
|
||||
cmdFvulsUploader.PersistentFlags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin. ENV: VULS_STDIN")
|
||||
@@ -92,6 +100,7 @@ func main() {
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "future-vuls"}
|
||||
rootCmd.AddCommand(cmdFvulsUploader)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
}
|
||||
|
||||
@@ -9,8 +9,8 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/contrib/trivy/parser"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@@ -34,45 +34,55 @@ func main() {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err = buf.ReadFrom(reader); err != nil {
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
trivyJSON = buf.Bytes()
|
||||
} else {
|
||||
if trivyJSON, err = ioutil.ReadFile(jsonFilePath); err != nil {
|
||||
fmt.Println("Failed to read file", err)
|
||||
fmt.Printf("Failed to read file. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
scanResult := &models.ScanResult{
|
||||
JSONVersion: models.JSONVersion,
|
||||
ScannedCves: models.VulnInfos{},
|
||||
}
|
||||
if scanResult, err = parser.Parse(trivyJSON, scanResult); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
parser, err := parser.NewParser(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to new parser. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
scanResult, err := parser.Parse(trivyJSON)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to parse. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
var resultJSON []byte
|
||||
if resultJSON, err = json.MarshalIndent(scanResult, "", " "); err != nil {
|
||||
fmt.Println("Failed to create json", err)
|
||||
fmt.Printf("Failed to create json. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(resultJSON))
|
||||
return
|
||||
},
|
||||
}
|
||||
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show version",
|
||||
Long: "Show version",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("trivy-to-vuls-%s-%s\n", config.Version, config.Revision)
|
||||
},
|
||||
}
|
||||
|
||||
cmdTrivyToVuls.Flags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonDir, "trivy-json-dir", "d", "./", "trivy json dir")
|
||||
cmdTrivyToVuls.Flags().StringVarP(&jsonFileName, "trivy-json-file-name", "f", "results.json", "trivy json file name")
|
||||
|
||||
var rootCmd = &cobra.Command{Use: "trivy-to-vuls"}
|
||||
rootCmd.AddCommand(cmdTrivyToVuls)
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
if err = rootCmd.Execute(); err != nil {
|
||||
fmt.Println("Failed to execute command", err)
|
||||
fmt.Printf("Failed to execute command. err: %+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
@@ -2,179 +2,32 @@ package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer/os"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
v2 "github.com/future-architect/vuls/contrib/trivy/parser/v2"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Parse :
|
||||
func Parse(vulnJSON []byte, scanResult *models.ScanResult) (result *models.ScanResult, err error) {
|
||||
var trivyResults report.Results
|
||||
if err = json.Unmarshal(vulnJSON, &trivyResults); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pkgs := models.Packages{}
|
||||
vulnInfos := models.VulnInfos{}
|
||||
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
|
||||
for _, trivyResult := range trivyResults {
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
overrideServerData(scanResult, &trivyResult)
|
||||
}
|
||||
for _, vuln := range trivyResult.Vulnerabilities {
|
||||
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
|
||||
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
|
||||
CveID: vuln.VulnerabilityID,
|
||||
Confidences: models.Confidences{
|
||||
{
|
||||
Score: 100,
|
||||
DetectionMethod: models.TrivyMatchStr,
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
// VulnType : "",
|
||||
}
|
||||
}
|
||||
vulnInfo := vulnInfos[vuln.VulnerabilityID]
|
||||
var notFixedYet bool
|
||||
fixState := ""
|
||||
if len(vuln.FixedVersion) == 0 {
|
||||
notFixedYet = true
|
||||
fixState = "Affected"
|
||||
}
|
||||
var references models.References
|
||||
for _, reference := range vuln.References {
|
||||
references = append(references, models.Reference{
|
||||
Source: "trivy",
|
||||
Link: reference,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(references, func(i, j int) bool {
|
||||
return references[i].Link < references[j].Link
|
||||
})
|
||||
|
||||
var published time.Time
|
||||
if vuln.PublishedDate != nil {
|
||||
published = *vuln.PublishedDate
|
||||
}
|
||||
|
||||
var lastModified time.Time
|
||||
if vuln.LastModifiedDate != nil {
|
||||
lastModified = *vuln.LastModifiedDate
|
||||
}
|
||||
|
||||
vulnInfo.CveContents = models.CveContents{
|
||||
models.Trivy: models.CveContent{
|
||||
Cvss3Severity: vuln.Severity,
|
||||
References: references,
|
||||
Title: vuln.Title,
|
||||
Summary: vuln.Description,
|
||||
Published: published,
|
||||
LastModified: lastModified,
|
||||
},
|
||||
}
|
||||
// do only if image type is Vuln
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
pkgs[vuln.PkgName] = models.Package{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
}
|
||||
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
|
||||
Name: vuln.PkgName,
|
||||
NotFixedYet: notFixedYet,
|
||||
FixState: fixState,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
} else {
|
||||
// Results of the library scan
|
||||
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
|
||||
Key: trivyResult.Type,
|
||||
Name: vuln.PkgName,
|
||||
Path: trivyResult.Target,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Libs = append(libScanner.Libs, types.Library{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
})
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
vulnInfos[vuln.VulnerabilityID] = vulnInfo
|
||||
}
|
||||
}
|
||||
// flatten and unique libraries
|
||||
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
|
||||
for path, v := range uniqueLibraryScannerPaths {
|
||||
uniqueLibrary := map[string]types.Library{}
|
||||
for _, lib := range v.Libs {
|
||||
uniqueLibrary[lib.Name+lib.Version] = lib
|
||||
}
|
||||
|
||||
var libraries []types.Library
|
||||
for _, library := range uniqueLibrary {
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
sort.Slice(libraries, func(i, j int) bool {
|
||||
return libraries[i].Name < libraries[j].Name
|
||||
})
|
||||
|
||||
libscanner := models.LibraryScanner{
|
||||
Path: path,
|
||||
Libs: libraries,
|
||||
}
|
||||
libraryScanners = append(libraryScanners, libscanner)
|
||||
}
|
||||
sort.Slice(libraryScanners, func(i, j int) bool {
|
||||
return libraryScanners[i].Path < libraryScanners[j].Path
|
||||
})
|
||||
scanResult.ScannedCves = vulnInfos
|
||||
scanResult.Packages = pkgs
|
||||
scanResult.LibraryScanners = libraryScanners
|
||||
return scanResult, nil
|
||||
// Parser is a parser interface
|
||||
type Parser interface {
|
||||
Parse(vulnJSON []byte) (result *models.ScanResult, err error)
|
||||
}
|
||||
|
||||
// IsTrivySupportedOS :
|
||||
func IsTrivySupportedOS(family string) bool {
|
||||
supportedFamilies := []string{
|
||||
os.RedHat,
|
||||
os.Debian,
|
||||
os.Ubuntu,
|
||||
os.CentOS,
|
||||
os.Fedora,
|
||||
os.Amazon,
|
||||
os.Oracle,
|
||||
os.Windows,
|
||||
os.OpenSUSE,
|
||||
os.OpenSUSELeap,
|
||||
os.OpenSUSETumbleweed,
|
||||
os.SLES,
|
||||
os.Photon,
|
||||
os.Alpine,
|
||||
}
|
||||
for _, supportedFamily := range supportedFamilies {
|
||||
if family == supportedFamily {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
// Report is used for judging the schema version of trivy
|
||||
type Report struct {
|
||||
SchemaVersion int `json:",omitempty"`
|
||||
}
|
||||
|
||||
func overrideServerData(scanResult *models.ScanResult, trivyResult *report.Result) {
|
||||
scanResult.Family = trivyResult.Type
|
||||
scanResult.ServerName = trivyResult.Target
|
||||
scanResult.Optional = map[string]interface{}{
|
||||
"trivy-target": trivyResult.Target,
|
||||
// NewParser makes a parser for the schema version of trivy
|
||||
func NewParser(vulnJSON []byte) (Parser, error) {
|
||||
r := Report{}
|
||||
if err := json.Unmarshal(vulnJSON, &r); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to parse JSON. Please use the latest version of trivy, trivy-to-vuls and future-vuls")
|
||||
}
|
||||
switch r.SchemaVersion {
|
||||
case 2:
|
||||
return v2.ParserV2{}, nil
|
||||
default:
|
||||
return nil, xerrors.Errorf("Failed to parse trivy json. SchemeVersion %d is not supported yet. Please contact support", r.SchemaVersion)
|
||||
}
|
||||
scanResult.ScannedAt = time.Now()
|
||||
scanResult.ScannedBy = "trivy"
|
||||
scanResult.ScannedVia = "trivy"
|
||||
}
|
||||
|
||||
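A hedged sketch of how a caller drives the schema-version dispatch above, mirroring what trivy-to-vuls does earlier in this diff; results.json stands in for a JSON report produced by trivy and the path is illustrative:

package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/contrib/trivy/parser"
)

func main() {
	trivyJSON, err := os.ReadFile("results.json") // illustrative path to trivy's JSON output
	if err != nil {
		fmt.Printf("Failed to read file. err: %+v\n", err)
		os.Exit(1)
	}
	p, err := parser.NewParser(trivyJSON) // returns ParserV2 when SchemaVersion == 2
	if err != nil {
		fmt.Printf("Failed to new parser. err: %+v\n", err)
		os.Exit(1)
	}
	scanResult, err := p.Parse(trivyJSON)
	if err != nil {
		fmt.Printf("Failed to parse. err: %+v\n", err)
		os.Exit(1)
	}
	fmt.Println(scanResult.ServerName, len(scanResult.ScannedCves))
}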
File diff suppressed because it is too large
contrib/trivy/parser/v2/parser.go (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
package v2
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/contrib/trivy/pkg"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// ParserV2 is a parser for scheme v2
|
||||
type ParserV2 struct {
|
||||
}
|
||||
|
||||
// Parse trivy's JSON and convert to the Vuls struct
|
||||
func (p ParserV2) Parse(vulnJSON []byte) (result *models.ScanResult, err error) {
|
||||
var report report.Report
|
||||
if err = json.Unmarshal(vulnJSON, &report); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
scanResult, err := pkg.Convert(report.Results)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
setScanResultMeta(scanResult, &report)
|
||||
return scanResult, nil
|
||||
}
|
||||
|
||||
func setScanResultMeta(scanResult *models.ScanResult, report *report.Report) {
|
||||
for _, r := range report.Results {
|
||||
const trivyTarget = "trivy-target"
|
||||
if pkg.IsTrivySupportedOS(r.Type) {
|
||||
scanResult.Family = r.Type
|
||||
scanResult.ServerName = r.Target
|
||||
scanResult.Optional = map[string]interface{}{
|
||||
trivyTarget: r.Target,
|
||||
}
|
||||
} else if pkg.IsTrivySupportedLib(r.Type) {
|
||||
if scanResult.Family == "" {
|
||||
scanResult.Family = constant.ServerTypePseudo
|
||||
}
|
||||
if scanResult.ServerName == "" {
|
||||
scanResult.ServerName = "library scan by trivy"
|
||||
}
|
||||
if _, ok := scanResult.Optional[trivyTarget]; !ok {
|
||||
scanResult.Optional = map[string]interface{}{
|
||||
trivyTarget: r.Target,
|
||||
}
|
||||
}
|
||||
}
|
||||
scanResult.ScannedAt = time.Now()
|
||||
scanResult.ScannedBy = "trivy"
|
||||
scanResult.ScannedVia = "trivy"
|
||||
}
|
||||
}
|
||||
contrib/trivy/parser/v2/parser_test.go (new file, 725 lines)
@@ -0,0 +1,725 @@
|
||||
package v2
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/d4l3k/messagediff"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
vulnJSON []byte
|
||||
expected *models.ScanResult
|
||||
}{
|
||||
"image redis": {
|
||||
vulnJSON: redisTrivy,
|
||||
expected: redisSR,
|
||||
},
|
||||
"image struts": {
|
||||
vulnJSON: strutsTrivy,
|
||||
expected: strutsSR,
|
||||
},
|
||||
"image osAndLib": {
|
||||
vulnJSON: osAndLibTrivy,
|
||||
expected: osAndLibSR,
|
||||
},
|
||||
}
|
||||
|
||||
for testcase, v := range cases {
|
||||
actual, err := ParserV2{}.Parse(v.vulnJSON)
|
||||
if err != nil {
|
||||
t.Errorf("%s", err)
|
||||
}
|
||||
|
||||
diff, equal := messagediff.PrettyDiff(
|
||||
v.expected,
|
||||
actual,
|
||||
messagediff.IgnoreStructField("ScannedAt"),
|
||||
messagediff.IgnoreStructField("Title"),
|
||||
messagediff.IgnoreStructField("Summary"),
|
||||
messagediff.IgnoreStructField("LastModified"),
|
||||
messagediff.IgnoreStructField("Published"),
|
||||
)
|
||||
if !equal {
|
||||
t.Errorf("test: %s, diff %s", testcase, diff)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var redisTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "redis",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.10"
|
||||
},
|
||||
"ImageID": "sha256:ddcca4b8a6f0367b5de2764dfe76b0a4bfa6d75237932185923705da47004347",
|
||||
"DiffIDs": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
],
|
||||
"RepoTags": [
|
||||
"redis:latest"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"redis@sha256:66ce9bc742609650afc3de7009658473ed601db4e926a5b16d239303383bacad"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "fa59f1c2817c9095f8f7272a4ab9b11db0332b33efb3a82c00a3d1fec8763684",
|
||||
"created": "2021-08-17T14:30:06.550779326Z",
|
||||
"docker_version": "20.10.7",
|
||||
"history": [
|
||||
{
|
||||
"created": "2021-08-17T01:24:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) ADD file:87b4e60fe3af680c6815448374365a44e9ea461bc8ade2960b4639c25aed3ba9 in / "
|
||||
},
|
||||
{
|
||||
"created": "2021-08-17T14:30:06Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"redis-server\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
|
||||
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
|
||||
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
|
||||
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
|
||||
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
|
||||
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"redis-server"
|
||||
],
|
||||
"Entrypoint": [
|
||||
"docker-entrypoint.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"GOSU_VERSION=1.12",
|
||||
"REDIS_VERSION=6.2.5",
|
||||
"REDIS_DOWNLOAD_URL=http://download.redis.io/releases/redis-6.2.5.tar.gz",
|
||||
"REDIS_DOWNLOAD_SHA=4b9a75709a1b74b3785e20a6c158cab94cf52298aa381eea947a678a60d551ae"
|
||||
],
|
||||
"Image": "sha256:befbd3fc62bffcd0115008969a014faaad07828b2c54b4bcfd2d9fc3aa2508cd",
|
||||
"Volumes": {
|
||||
"/data": {}
|
||||
},
|
||||
"WorkingDir": "/data"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "redis (debian 10.10)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "adduser",
|
||||
"Version": "3.118",
|
||||
"SrcName": "adduser",
|
||||
"SrcVersion": "3.118",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "apt",
|
||||
"Version": "1.8.2.3",
|
||||
"SrcName": "apt",
|
||||
"SrcVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "bsdutils",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Name": "pkgA",
|
||||
"Version": "1:2.33.1-0.1",
|
||||
"SrcName": "util-linux",
|
||||
"SrcVersion": "2.33.1-0.1",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2011-3374",
|
||||
"PkgName": "apt",
|
||||
"InstalledVersion": "1.8.2.3",
|
||||
"Layer": {
|
||||
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
|
||||
},
|
||||
"SeveritySource": "debian",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2011-3374",
|
||||
"Description": "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
"Severity": "LOW",
|
||||
"CweIDs": [
|
||||
"CWE-347"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:L/A:N",
|
||||
"V2Score": 4.3,
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://access.redhat.com/security/cve/cve-2011-3374"
|
||||
],
|
||||
"PublishedDate": "2019-11-26T00:15:00Z",
|
||||
"LastModifiedDate": "2021-02-09T16:08:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`)
|
||||
var redisSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "redis (debian 10.10)",
|
||||
Family: "debian",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2011-3374": {
|
||||
CveID: "CVE-2011-3374",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "apt",
|
||||
NotFixedYet: true,
|
||||
FixState: "Affected",
|
||||
FixedIn: "",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "",
|
||||
Summary: "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
|
||||
Cvss3Severity: "LOW",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://access.redhat.com/security/cve/cve-2011-3374"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{},
|
||||
Packages: models.Packages{
|
||||
"apt": models.Package{
|
||||
Name: "apt",
|
||||
Version: "1.8.2.3",
|
||||
},
|
||||
"adduser": models.Package{
|
||||
Name: "adduser",
|
||||
Version: "3.118",
|
||||
},
|
||||
"bsdutils": models.Package{
|
||||
Name: "bsdutils",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
"pkgA": models.Package{
|
||||
Name: "pkgA",
|
||||
Version: "1:2.33.1-0.1",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"util-linux": models.SrcPackage{
|
||||
Name: "util-linux",
|
||||
Version: "2.33.1-0.1",
|
||||
BinaryNames: []string{"bsdutils", "pkgA"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "redis (debian 10.10)",
|
||||
},
|
||||
}
|
||||
|
||||
var strutsTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "/data/struts-1.2.7/lib",
|
||||
"ArtifactType": "filesystem",
|
||||
"Metadata": {
|
||||
"ImageConfig": {
|
||||
"architecture": "",
|
||||
"created": "0001-01-01T00:00:00Z",
|
||||
"os": "",
|
||||
"rootfs": {
|
||||
"type": "",
|
||||
"diff_ids": null
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "Java",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "jar",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "oro:oro",
|
||||
"Version": "2.0.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "struts:struts",
|
||||
"Version": "1.2.7",
|
||||
"Layer": {}
|
||||
},
|
||||
{
|
||||
"Name": "commons-beanutils:commons-beanutils",
|
||||
"Version": "1.7.0",
|
||||
"Layer": {}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2014-0114",
|
||||
"PkgName": "commons-beanutils:commons-beanutils",
|
||||
"InstalledVersion": "1.7.0",
|
||||
"FixedVersion": "1.9.2",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2014-0114",
|
||||
"Title": "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
"Description": "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
"Severity": "HIGH",
|
||||
"CweIDs": [
|
||||
"CWE-20"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V2Score": 7.5
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"http://advisories.mageia.org/MGASA-2014-0219.html"
|
||||
],
|
||||
"PublishedDate": "2014-04-30T10:49:00Z",
|
||||
"LastModifiedDate": "2021-01-26T18:15:00Z"
|
||||
},
|
||||
{
|
||||
"VulnerabilityID": "CVE-2012-1007",
|
||||
"PkgName": "struts:struts",
|
||||
"InstalledVersion": "1.2.7",
|
||||
"Layer": {},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2012-1007",
|
||||
"Title": "struts: multiple XSS flaws",
|
||||
"Description": "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
"Severity": "MEDIUM",
|
||||
"CweIDs": [
|
||||
"CWE-79"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
},
|
||||
"redhat": {
|
||||
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
|
||||
"V2Score": 4.3
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"
|
||||
],
|
||||
"PublishedDate": "2012-02-07T04:09:00Z",
|
||||
"LastModifiedDate": "2018-10-17T01:29:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var strutsSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "library scan by trivy",
|
||||
Family: "pseudo",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2014-0114": {
|
||||
CveID: "CVE-2014-0114",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "Apache Struts 1: Class Loader manipulation via request parameters",
|
||||
Summary: "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
|
||||
Cvss3Severity: "HIGH",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "http://advisories.mageia.org/MGASA-2014-0219.html"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
FixedIn: "1.9.2",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
"CVE-2012-1007": {
|
||||
CveID: "CVE-2012-1007",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "struts: multiple XSS flaws",
|
||||
Summary: "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
|
||||
Cvss3Severity: "MEDIUM",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "jar",
|
||||
Name: "struts:struts",
|
||||
FixedIn: "",
|
||||
//TODO use Artifactname?
|
||||
Path: "Java",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "jar",
|
||||
LockfilePath: "Java",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "commons-beanutils:commons-beanutils",
|
||||
Version: "1.7.0",
|
||||
},
|
||||
{
|
||||
Name: "oro:oro",
|
||||
Version: "2.0.7",
|
||||
},
|
||||
{
|
||||
Name: "struts:struts",
|
||||
Version: "1.2.7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{},
|
||||
SrcPackages: models.SrcPackages{},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "Java",
|
||||
},
|
||||
}
|
||||
|
||||
var osAndLibTrivy = []byte(`
|
||||
{
|
||||
"SchemaVersion": 2,
|
||||
"ArtifactName": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
|
||||
"ArtifactType": "container_image",
|
||||
"Metadata": {
|
||||
"OS": {
|
||||
"Family": "debian",
|
||||
"Name": "10.2"
|
||||
},
|
||||
"ImageID": "sha256:5a992077baba51b97f27591a10d54d2f2723dc9c81a3fe419e261023f2554933",
|
||||
"DiffIDs": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
],
|
||||
"RepoTags": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd:v2.9.0"
|
||||
],
|
||||
"RepoDigests": [
|
||||
"quay.io/fluentd_elasticsearch/fluentd@sha256:54716d825ec9791ffb403ac17a1e82159c98ac6161e02b2a054595ad01aa6726"
|
||||
],
|
||||
"ImageConfig": {
|
||||
"architecture": "amd64",
|
||||
"container": "232f3fc7ddffd71dc3ff52c6c0c3a5feea2f51acffd9b53850a8fc6f1a15319a",
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"docker_version": "19.03.4",
|
||||
"history": [
|
||||
{
|
||||
"created": "2020-03-04T13:59:39.161374106Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"/run.sh\"]",
|
||||
"empty_layer": true
|
||||
}
|
||||
],
|
||||
"os": "linux",
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"Cmd": [
|
||||
"/run.sh"
|
||||
],
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
|
||||
],
|
||||
"Image": "sha256:2a538358cddc4824e9eff1531e0c63ae5e3cda85d2984c647df9b1c816b9b86b",
|
||||
"ExposedPorts": {
|
||||
"80/tcp": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Results": [
|
||||
{
|
||||
"Target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
"Class": "os-pkgs",
|
||||
"Type": "debian",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "libgnutls30",
|
||||
"Version": "3.6.7-4",
|
||||
"SrcName": "gnutls28",
|
||||
"SrcVersion": "3.6.7-4",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2021-20231",
|
||||
"PkgName": "libgnutls30",
|
||||
"InstalledVersion": "3.6.7-4",
|
||||
"FixedVersion": "3.6.7-4+deb10u7",
|
||||
"Layer": {
|
||||
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
|
||||
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2021-20231",
|
||||
"Title": "gnutls: Use after free in client key_share extension",
|
||||
"Description": "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-416"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:L",
|
||||
"V3Score": 3.7
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://bugzilla.redhat.com/show_bug.cgi?id=1922276"
|
||||
],
|
||||
"PublishedDate": "2021-03-12T19:15:00Z",
|
||||
"LastModifiedDate": "2021-06-01T14:07:00Z"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Target": "Ruby",
|
||||
"Class": "lang-pkgs",
|
||||
"Type": "gemspec",
|
||||
"Packages": [
|
||||
{
|
||||
"Name": "activesupport",
|
||||
"Version": "6.0.2.1",
|
||||
"License": "MIT",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"FilePath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec"
|
||||
}
|
||||
],
|
||||
"Vulnerabilities": [
|
||||
{
|
||||
"VulnerabilityID": "CVE-2020-8165",
|
||||
"PkgName": "activesupport",
|
||||
"PkgPath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
"InstalledVersion": "6.0.2.1",
|
||||
"FixedVersion": "6.0.3.1, 5.2.4.3",
|
||||
"Layer": {
|
||||
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
|
||||
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
|
||||
},
|
||||
"SeveritySource": "nvd",
|
||||
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2020-8165",
|
||||
"Title": "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
"Description": "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
"Severity": "CRITICAL",
|
||||
"CweIDs": [
|
||||
"CWE-502"
|
||||
],
|
||||
"CVSS": {
|
||||
"nvd": {
|
||||
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V2Score": 7.5,
|
||||
"V3Score": 9.8
|
||||
},
|
||||
"redhat": {
|
||||
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
"V3Score": 9.8
|
||||
}
|
||||
},
|
||||
"References": [
|
||||
"https://www.debian.org/security/2020/dsa-4766"
|
||||
],
|
||||
"PublishedDate": "2020-06-19T18:15:00Z",
|
||||
"LastModifiedDate": "2020-10-17T12:15:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}`)
|
||||
|
||||
var osAndLibSR = &models.ScanResult{
|
||||
JSONVersion: 4,
|
||||
ServerName: "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
Family: "debian",
|
||||
ScannedBy: "trivy",
|
||||
ScannedVia: "trivy",
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2021-20231": {
|
||||
CveID: "CVE-2021-20231",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
models.PackageFixStatus{
|
||||
Name: "libgnutls30",
|
||||
NotFixedYet: false,
|
||||
FixState: "",
|
||||
FixedIn: "3.6.7-4+deb10u7",
|
||||
}},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "gnutls: Use after free in client key_share extension",
|
||||
Summary: "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://bugzilla.redhat.com/show_bug.cgi?id=1922276"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
},
|
||||
"CVE-2020-8165": {
|
||||
CveID: "CVE-2020-8165",
|
||||
Confidences: models.Confidences{
|
||||
models.Confidence{
|
||||
Score: 100,
|
||||
DetectionMethod: "TrivyMatch",
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{
|
||||
"trivy": []models.CveContent{{
|
||||
Title: "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
|
||||
Summary: "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
|
||||
Cvss3Severity: "CRITICAL",
|
||||
References: models.References{
|
||||
{Source: "trivy", Link: "https://www.debian.org/security/2020/dsa-4766"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
LibraryFixedIns: models.LibraryFixedIns{
|
||||
models.LibraryFixedIn{
|
||||
Key: "gemspec",
|
||||
Name: "activesupport",
|
||||
FixedIn: "6.0.3.1, 5.2.4.3",
|
||||
Path: "Ruby",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
LibraryScanners: models.LibraryScanners{
|
||||
models.LibraryScanner{
|
||||
Type: "gemspec",
|
||||
LockfilePath: "Ruby",
|
||||
Libs: []models.Library{
|
||||
{
|
||||
Name: "activesupport",
|
||||
Version: "6.0.2.1",
|
||||
FilePath: "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Packages: models.Packages{
|
||||
"libgnutls30": models.Package{
|
||||
Name: "libgnutls30",
|
||||
Version: "3.6.7-4",
|
||||
},
|
||||
},
|
||||
SrcPackages: models.SrcPackages{
|
||||
"gnutls28": models.SrcPackage{
|
||||
Name: "gnutls28",
|
||||
Version: "3.6.7-4",
|
||||
BinaryNames: []string{"libgnutls30"},
|
||||
},
|
||||
},
|
||||
Optional: map[string]interface{}{
|
||||
"trivy-target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
|
||||
},
|
||||
}
|
||||
contrib/trivy/pkg/converter.go (new file, 228 lines)
@@ -0,0 +1,228 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
ftypes "github.com/aquasecurity/fanal/types"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer/os"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// Convert :
|
||||
func Convert(results report.Results) (result *models.ScanResult, err error) {
|
||||
scanResult := &models.ScanResult{
|
||||
JSONVersion: models.JSONVersion,
|
||||
ScannedCves: models.VulnInfos{},
|
||||
}
|
||||
|
||||
pkgs := models.Packages{}
|
||||
srcPkgs := models.SrcPackages{}
|
||||
vulnInfos := models.VulnInfos{}
|
||||
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
|
||||
for _, trivyResult := range results {
|
||||
for _, vuln := range trivyResult.Vulnerabilities {
|
||||
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
|
||||
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
|
||||
CveID: vuln.VulnerabilityID,
|
||||
Confidences: models.Confidences{
|
||||
{
|
||||
Score: 100,
|
||||
DetectionMethod: models.TrivyMatchStr,
|
||||
},
|
||||
},
|
||||
AffectedPackages: models.PackageFixStatuses{},
|
||||
CveContents: models.CveContents{},
|
||||
LibraryFixedIns: models.LibraryFixedIns{},
|
||||
// VulnType : "",
|
||||
}
|
||||
}
|
||||
vulnInfo := vulnInfos[vuln.VulnerabilityID]
|
||||
var notFixedYet bool
|
||||
fixState := ""
|
||||
if len(vuln.FixedVersion) == 0 {
|
||||
notFixedYet = true
|
||||
fixState = "Affected"
|
||||
}
|
||||
var references models.References
|
||||
for _, reference := range vuln.References {
|
||||
references = append(references, models.Reference{
|
||||
Source: "trivy",
|
||||
Link: reference,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(references, func(i, j int) bool {
|
||||
return references[i].Link < references[j].Link
|
||||
})
|
||||
|
||||
var published time.Time
|
||||
if vuln.PublishedDate != nil {
|
||||
published = *vuln.PublishedDate
|
||||
}
|
||||
|
||||
var lastModified time.Time
|
||||
if vuln.LastModifiedDate != nil {
|
||||
lastModified = *vuln.LastModifiedDate
|
||||
}
|
||||
|
||||
vulnInfo.CveContents = models.CveContents{
|
||||
models.Trivy: []models.CveContent{{
|
||||
Cvss3Severity: vuln.Severity,
|
||||
References: references,
|
||||
Title: vuln.Title,
|
||||
Summary: vuln.Description,
|
||||
Published: published,
|
||||
LastModified: lastModified,
|
||||
}},
|
||||
}
|
||||
// do only if image type is Vuln
|
||||
if IsTrivySupportedOS(trivyResult.Type) {
|
||||
pkgs[vuln.PkgName] = models.Package{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
}
|
||||
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
|
||||
Name: vuln.PkgName,
|
||||
NotFixedYet: notFixedYet,
|
||||
FixState: fixState,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
} else {
|
||||
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
|
||||
Key: trivyResult.Type,
|
||||
Name: vuln.PkgName,
|
||||
Path: trivyResult.Target,
|
||||
FixedIn: vuln.FixedVersion,
|
||||
})
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Type = trivyResult.Type
|
||||
libScanner.Libs = append(libScanner.Libs, models.Library{
|
||||
Name: vuln.PkgName,
|
||||
Version: vuln.InstalledVersion,
|
||||
FilePath: vuln.PkgPath,
|
||||
})
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
vulnInfos[vuln.VulnerabilityID] = vulnInfo
|
||||
}
|
||||
|
||||
// The --list-all-pkgs flag of trivy outputs all installed packages, so collect them.
|
||||
if trivyResult.Class == report.ClassOSPkg {
|
||||
for _, p := range trivyResult.Packages {
|
||||
pkgs[p.Name] = models.Package{
|
||||
Name: p.Name,
|
||||
Version: p.Version,
|
||||
}
|
||||
if p.Name != p.SrcName {
|
||||
if v, ok := srcPkgs[p.SrcName]; !ok {
|
||||
srcPkgs[p.SrcName] = models.SrcPackage{
|
||||
Name: p.SrcName,
|
||||
Version: p.SrcVersion,
|
||||
BinaryNames: []string{p.Name},
|
||||
}
|
||||
} else {
|
||||
v.AddBinaryName(p.Name)
|
||||
srcPkgs[p.SrcName] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if trivyResult.Class == report.ClassLangPkg {
|
||||
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
|
||||
libScanner.Type = trivyResult.Type
|
||||
for _, p := range trivyResult.Packages {
|
||||
libScanner.Libs = append(libScanner.Libs, models.Library{
|
||||
Name: p.Name,
|
||||
Version: p.Version,
|
||||
FilePath: p.FilePath,
|
||||
})
|
||||
}
|
||||
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
|
||||
}
|
||||
}
|
||||
|
||||
// flatten and unique libraries
|
||||
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
|
||||
for path, v := range uniqueLibraryScannerPaths {
|
||||
uniqueLibrary := map[string]models.Library{}
|
||||
for _, lib := range v.Libs {
|
||||
uniqueLibrary[lib.Name+lib.Version] = lib
|
||||
}
|
||||
|
||||
var libraries []models.Library
|
||||
for _, library := range uniqueLibrary {
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
sort.Slice(libraries, func(i, j int) bool {
|
||||
return libraries[i].Name < libraries[j].Name
|
||||
})
|
||||
|
||||
libscanner := models.LibraryScanner{
|
||||
Type: v.Type,
|
||||
LockfilePath: path,
|
||||
Libs: libraries,
|
||||
}
|
||||
libraryScanners = append(libraryScanners, libscanner)
|
||||
}
|
||||
sort.Slice(libraryScanners, func(i, j int) bool {
|
||||
return libraryScanners[i].LockfilePath < libraryScanners[j].LockfilePath
|
||||
})
|
||||
scanResult.ScannedCves = vulnInfos
|
||||
scanResult.Packages = pkgs
|
||||
scanResult.SrcPackages = srcPkgs
|
||||
scanResult.LibraryScanners = libraryScanners
|
||||
return scanResult, nil
|
||||
}
|
||||
|
||||
// IsTrivySupportedOS :
|
||||
func IsTrivySupportedOS(family string) bool {
|
||||
supportedFamilies := map[string]interface{}{
|
||||
os.RedHat: struct{}{},
|
||||
os.Debian: struct{}{},
|
||||
os.Ubuntu: struct{}{},
|
||||
os.CentOS: struct{}{},
|
||||
os.Rocky: struct{}{},
|
||||
os.Alma: struct{}{},
|
||||
os.Fedora: struct{}{},
|
||||
os.Amazon: struct{}{},
|
||||
os.Oracle: struct{}{},
|
||||
os.Windows: struct{}{},
|
||||
os.OpenSUSE: struct{}{},
|
||||
os.OpenSUSELeap: struct{}{},
|
||||
os.OpenSUSETumbleweed: struct{}{},
|
||||
os.SLES: struct{}{},
|
||||
os.Photon: struct{}{},
|
||||
os.Alpine: struct{}{},
|
||||
// os.Fedora: struct{}{}, not supported yet
|
||||
}
|
||||
_, ok := supportedFamilies[family]
|
||||
return ok
|
||||
}
|
||||
|
||||
// IsTrivySupportedLib :
|
||||
func IsTrivySupportedLib(typestr string) bool {
|
||||
supportedLibs := map[string]interface{}{
|
||||
ftypes.Bundler: struct{}{},
|
||||
ftypes.GemSpec: struct{}{},
|
||||
ftypes.Cargo: struct{}{},
|
||||
ftypes.Composer: struct{}{},
|
||||
ftypes.Npm: struct{}{},
|
||||
ftypes.NuGet: struct{}{},
|
||||
ftypes.Pip: struct{}{},
|
||||
ftypes.Pipenv: struct{}{},
|
||||
ftypes.Poetry: struct{}{},
|
||||
ftypes.PythonPkg: struct{}{},
|
||||
ftypes.NodePkg: struct{}{},
|
||||
ftypes.Yarn: struct{}{},
|
||||
ftypes.Jar: struct{}{},
|
||||
ftypes.Pom: struct{}{},
|
||||
ftypes.GoBinary: struct{}{},
|
||||
ftypes.GoMod: struct{}{},
|
||||
}
|
||||
_, ok := supportedLibs[typestr]
|
||||
return ok
|
||||
}
|
||||
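As a side note, the converter added above is exercised by the fixture near the top of this comparison. A minimal, hypothetical sketch of driving it outside the test suite could look like the following. It assumes Trivy was run with the --list-all-pkgs flag mentioned above (e.g. trivy image --list-all-pkgs -f json -o trivy.json <image>) and that the top-level "Results" array of Trivy's JSON unmarshals into report.Results; the file name and the wrapper struct are illustrative and not part of this change set.

package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/aquasecurity/trivy/pkg/report"

	"github.com/future-architect/vuls/contrib/trivy/pkg"
)

func main() {
	// Read a Trivy JSON report produced with --list-all-pkgs (file name is illustrative).
	raw, err := os.ReadFile("trivy.json")
	if err != nil {
		panic(err)
	}

	// Assumption: the report wraps its findings in a top-level "Results" array.
	var wrapper struct {
		Results report.Results `json:"Results"`
	}
	if err := json.Unmarshal(raw, &wrapper); err != nil {
		panic(err)
	}

	// Convert the Trivy results into a Vuls ScanResult as implemented above.
	scanResult, err := pkg.Convert(wrapper.Results)
	if err != nil {
		panic(err)
	}
	fmt.Printf("CVEs: %d, OS packages: %d, library scanners: %d\n",
		len(scanResult.ScannedCves), len(scanResult.Packages), len(scanResult.LibraryScanners))
}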
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
@@ -15,9 +16,9 @@ import (
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/util"
|
||||
cvedb "github.com/kotakanbe/go-cve-dictionary/db"
|
||||
cvelog "github.com/kotakanbe/go-cve-dictionary/log"
|
||||
cvemodels "github.com/kotakanbe/go-cve-dictionary/models"
|
||||
cvedb "github.com/vulsio/go-cve-dictionary/db"
|
||||
cvelog "github.com/vulsio/go-cve-dictionary/log"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
type goCveDictClient struct {
|
||||
@@ -26,7 +27,9 @@ type goCveDictClient struct {
|
||||
}
|
||||
|
||||
func newGoCveDictClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCveDictClient, error) {
|
||||
cvelog.SetLogger(o.LogDir, o.Quiet, o.Debug, false)
|
||||
if err := cvelog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
driver, locked, err := newCveDB(cnf)
|
||||
if locked {
|
||||
@@ -41,25 +44,18 @@ func (api goCveDictClient) closeDB() error {
|
||||
if api.driver == nil {
|
||||
return nil
|
||||
}
|
||||
if err := api.driver.CloseDB(); err != nil {
|
||||
return xerrors.Errorf("Failed to close DB: %+v", err)
|
||||
}
|
||||
return nil
|
||||
return api.driver.CloseDB()
|
||||
}
|
||||
|
||||
func (api goCveDictClient) fetchCveDetails(cveIDs []string) (cveDetails []cvemodels.CveDetail, err error) {
|
||||
for _, cveID := range cveIDs {
|
||||
cveDetail, err := api.driver.Get(cveID)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fetch CVE. err: %w", err)
|
||||
}
|
||||
if len(cveDetail.CveID) == 0 {
|
||||
cveDetails = append(cveDetails, cvemodels.CveDetail{CveID: cveID})
|
||||
} else {
|
||||
cveDetails = append(cveDetails, *cveDetail)
|
||||
}
|
||||
m, err := api.driver.GetMulti(cveIDs)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to GetMulti. err: %w", err)
|
||||
}
|
||||
return
|
||||
for _, v := range m {
|
||||
cveDetails = append(cveDetails, v)
|
||||
}
|
||||
return cveDetails, nil
|
||||
}
|
||||
|
||||
type response struct {
|
||||
@@ -103,13 +99,7 @@ func (api goCveDictClient) fetchCveDetailsViaHTTP(cveIDs []string) (cveDetails [
|
||||
for range cveIDs {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
if len(res.CveDetail.CveID) == 0 {
|
||||
cveDetails = append(cveDetails, cvemodels.CveDetail{
|
||||
CveID: res.Key,
|
||||
})
|
||||
} else {
|
||||
cveDetails = append(cveDetails, res.CveDetail)
|
||||
}
|
||||
cveDetails = append(cveDetails, res.CveDetail)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
@@ -154,21 +144,40 @@ func (api goCveDictClient) httpGet(key, url string, resChan chan<- response, err
|
||||
}
|
||||
}
|
||||
|
||||
func (api goCveDictClient) fetchCveDetailsByCpeName(cpeName string) ([]cvemodels.CveDetail, error) {
|
||||
func (api goCveDictClient) detectCveByCpeURI(cpeURI string, useJVN bool) (cves []cvemodels.CveDetail, err error) {
|
||||
if api.cnf.IsFetchViaHTTP() {
|
||||
url, err := util.URLPathJoin(api.cnf.GetURL(), "cpes")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := map[string]string{"name": cpeName}
|
||||
query := map[string]string{"name": cpeURI}
|
||||
logging.Log.Debugf("HTTP Request to %s, query: %#v", url, query)
|
||||
return api.httpPost(cpeName, url, query)
|
||||
if cves, err = api.httpPost(url, query); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
if cves, err = api.driver.GetByCpeURI(cpeURI); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return api.driver.GetByCpeURI(cpeName)
|
||||
|
||||
if useJVN {
|
||||
return cves, nil
|
||||
}
|
||||
|
||||
nvdCves := []cvemodels.CveDetail{}
|
||||
for _, cve := range cves {
|
||||
if !cve.HasNvd() {
|
||||
continue
|
||||
}
|
||||
cve.Jvns = []cvemodels.Jvn{}
|
||||
nvdCves = append(nvdCves, cve)
|
||||
}
|
||||
return nvdCves, nil
|
||||
}
|
||||
|
||||
func (api goCveDictClient) httpPost(key, url string, query map[string]string) ([]cvemodels.CveDetail, error) {
|
||||
func (api goCveDictClient) httpPost(url string, query map[string]string) ([]cvemodels.CveDetail, error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
@@ -207,7 +216,7 @@ func newCveDB(cnf config.VulnDictInterface) (driver cvedb.DB, locked bool, err e
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
driver, locked, err = cvedb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL())
|
||||
driver, locked, err = cvedb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), cvedb.Option{})
|
||||
if err != nil {
|
||||
err = xerrors.Errorf("Failed to init CVE DB. err: %w, path: %s", err, path)
|
||||
return nil, locked, err
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
@@ -17,10 +18,16 @@ import (
|
||||
"github.com/future-architect/vuls/oval"
|
||||
"github.com/future-architect/vuls/reporter"
|
||||
"github.com/future-architect/vuls/util"
|
||||
cvemodels "github.com/kotakanbe/go-cve-dictionary/models"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Cpe :
|
||||
type Cpe struct {
|
||||
CpeURI string
|
||||
UseJVN bool
|
||||
}
|
||||
|
||||
// Detect vulns and fill CVE detailed information
|
||||
func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
|
||||
@@ -36,7 +43,16 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
r.ScannedCves = models.VulnInfos{}
|
||||
}
|
||||
|
||||
if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectPkgCves(&r, config.Conf.OvalDict, config.Conf.Gost); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect Pkg CVE: %w", err)
|
||||
}
|
||||
|
||||
cpeURIs, owaspDCXMLPath := []string{}, ""
|
||||
cpes := []Cpe{}
|
||||
if len(r.Container.ContainerID) == 0 {
|
||||
cpeURIs = config.Conf.Servers[r.ServerName].CpeNames
|
||||
owaspDCXMLPath = config.Conf.Servers[r.ServerName].OwaspDCXMLPath
|
||||
@@ -56,21 +72,18 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
}
|
||||
cpeURIs = append(cpeURIs, cpes...)
|
||||
}
|
||||
|
||||
if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
|
||||
for _, uri := range cpeURIs {
|
||||
cpes = append(cpes, Cpe{
|
||||
CpeURI: uri,
|
||||
UseJVN: true,
|
||||
})
|
||||
}
|
||||
|
||||
if err := DetectPkgCves(&r, config.Conf.OvalDict, config.Conf.Gost); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect Pkg CVE: %w", err)
|
||||
}
|
||||
|
||||
if err := DetectCpeURIsCves(&r, cpeURIs, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
if err := DetectCpeURIsCves(&r, cpes, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect CVE of `%s`: %w", cpeURIs, err)
|
||||
}
|
||||
|
||||
repos := config.Conf.Servers[r.ServerName].GitHubRepos
|
||||
if err := DetectGitHubCves(&r, repos, config.Conf.IgnoreGitHubDismissed); err != nil {
|
||||
if err := DetectGitHubCves(&r, repos); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to detect GitHub Cves: %w", err)
|
||||
}
|
||||
|
||||
@@ -78,12 +91,10 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
return nil, xerrors.Errorf("Failed to detect WordPress Cves: %w", err)
|
||||
}
|
||||
|
||||
logging.Log.Infof("Fill CVE detailed with gost")
|
||||
if err := gost.FillCVEsWithRedHat(&r, config.Conf.Gost); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with gost: %w", err)
|
||||
}
|
||||
|
||||
logging.Log.Infof("Fill CVE detailed with go-cve-dictionary")
|
||||
if err := FillCvesWithNvdJvn(&r, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with CVE: %w", err)
|
||||
}
|
||||
@@ -92,13 +103,17 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with exploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d exploits are detected", r.FormatServerName(), nExploitCve)
|
||||
logging.Log.Infof("%s: %d PoC are detected", r.FormatServerName(), nExploitCve)
|
||||
|
||||
nMetasploitCve, err := FillWithMetasploit(&r, config.Conf.Metasploit)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with metasploit: %w", err)
|
||||
}
|
||||
logging.Log.Infof("%s: %d modules are detected", r.FormatServerName(), nMetasploitCve)
|
||||
logging.Log.Infof("%s: %d exploits are detected", r.FormatServerName(), nMetasploitCve)
|
||||
|
||||
if err := FillWithKEVuln(&r, config.Conf.KEVuln); err != nil {
|
||||
return nil, xerrors.Errorf("Failed to fill with Known Exploited Vulnerabilities: %w", err)
|
||||
}
|
||||
|
||||
FillCweDict(&r)
|
||||
|
||||
@@ -134,8 +149,23 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
}
|
||||
|
||||
for i, r := range rs {
|
||||
r.ScannedCves = r.ScannedCves.FilterByCvssOver(config.Conf.CvssScoreOver)
|
||||
r.ScannedCves = r.ScannedCves.FilterUnfixed(config.Conf.IgnoreUnfixed)
|
||||
nFiltered := 0
|
||||
logging.Log.Infof("%s: total %d CVEs detected", r.FormatServerName(), len(r.ScannedCves))
|
||||
|
||||
if 0 < config.Conf.CvssScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByCvssOver(config.Conf.CvssScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --cvss-over=%g", r.FormatServerName(), nFiltered, config.Conf.CvssScoreOver)
|
||||
}
|
||||
|
||||
if config.Conf.IgnoreUnfixed {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterUnfixed(config.Conf.IgnoreUnfixed)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unfixed", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
if 0 < config.Conf.ConfidenceScoreOver {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterByConfidenceOver(config.Conf.ConfidenceScoreOver)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --confidence-over=%d", r.FormatServerName(), nFiltered, config.Conf.ConfidenceScoreOver)
|
||||
}
|
||||
|
||||
// IgnoreCves
|
||||
ignoreCves := []string{}
|
||||
@@ -144,7 +174,10 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
} else if con, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignoreCves = con.IgnoreCves
|
||||
}
|
||||
r.ScannedCves = r.ScannedCves.FilterIgnoreCves(ignoreCves)
|
||||
if 0 < len(ignoreCves) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnoreCves(ignoreCves)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignoreCves=%s", r.FormatServerName(), nFiltered, ignoreCves)
|
||||
}
|
||||
|
||||
// ignorePkgs
|
||||
ignorePkgsRegexps := []string{}
|
||||
@@ -153,11 +186,15 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
} else if s, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
|
||||
ignorePkgsRegexps = s.IgnorePkgsRegexp
|
||||
}
|
||||
r.ScannedCves = r.ScannedCves.FilterIgnorePkgs(ignorePkgsRegexps)
|
||||
if 0 < len(ignorePkgsRegexps) {
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnorePkgs(ignorePkgsRegexps)
|
||||
logging.Log.Infof("%s: %d CVEs filtered by ignorePkgsRegexp=%s", r.FormatServerName(), nFiltered, ignorePkgsRegexps)
|
||||
}
|
||||
|
||||
// IgnoreUnscored
|
||||
if config.Conf.IgnoreUnscoredCves {
|
||||
r.ScannedCves = r.ScannedCves.FindScoredVulns()
|
||||
r.ScannedCves, nFiltered = r.ScannedCves.FindScoredVulns()
|
||||
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unscored-cves", r.FormatServerName(), nFiltered)
|
||||
}
|
||||
|
||||
r.FilterInactiveWordPressLibs(config.Conf.WpScan.DetectInactive)
|
||||
@@ -171,21 +208,30 @@ func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
|
||||
func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf config.GostConf) error {
|
||||
// Pkg Scan
|
||||
if r.Release != "" {
|
||||
// OVAL
|
||||
if err := detectPkgsCvesWithOval(ovalCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with OVAL: %w", err)
|
||||
}
|
||||
if len(r.Packages)+len(r.SrcPackages) > 0 {
|
||||
// OVAL, gost(Debian Security Tracker) does not support Package for Raspbian, so skip it.
|
||||
if r.Family == constant.Raspbian {
|
||||
r = r.RemoveRaspbianPackFromResult()
|
||||
}
|
||||
|
||||
// gost
|
||||
if err := detectPkgsCvesWithGost(gostCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with gost: %w", err)
|
||||
// OVAL
|
||||
if err := detectPkgsCvesWithOval(ovalCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with OVAL: %w", err)
|
||||
}
|
||||
|
||||
// gost
|
||||
if err := detectPkgsCvesWithGost(gostCnf, r); err != nil {
|
||||
return xerrors.Errorf("Failed to detect CVE with gost: %w", err)
|
||||
}
|
||||
} else {
|
||||
logging.Log.Infof("Number of packages is 0. Skip OVAL and gost detection")
|
||||
}
|
||||
} else if reuseScannedCves(r) {
|
||||
logging.Log.Infof("r.Release is empty. Use CVEs as it as.")
|
||||
} else if r.Family == constant.ServerTypePseudo {
|
||||
logging.Log.Infof("pseudo type. Skip OVAL and gost detection")
|
||||
} else {
|
||||
return xerrors.Errorf("Failed to fill CVEs. r.Release is empty")
|
||||
logging.Log.Infof("r.Release is empty. detect as pseudo type. Skip OVAL and gost detection")
|
||||
}
|
||||
|
||||
for i, v := range r.ScannedCves {
|
||||
@@ -219,7 +265,7 @@ func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf c
|
||||
}
|
||||
|
||||
// DetectGitHubCves fetches CVEs from GitHub Security Alerts
|
||||
func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHubConf, ignoreDismissed bool) error {
|
||||
func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHubConf) error {
|
||||
if len(githubConfs) == 0 {
|
||||
return nil
|
||||
}
|
||||
@@ -229,7 +275,7 @@ func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHub
|
||||
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s", ownerRepo)
|
||||
}
|
||||
owner, repo := ss[0], ss[1]
|
||||
n, err := DetectGitHubSecurityAlerts(r, owner, repo, setting.Token, ignoreDismissed)
|
||||
n, err := DetectGitHubSecurityAlerts(r, owner, repo, setting.Token, setting.IgnoreGitHubDismissed)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to access GitHub Security Alerts: %w", err)
|
||||
}
|
||||
@@ -244,7 +290,7 @@ func DetectWordPressCves(r *models.ScanResult, wpCnf config.WpScanConf) error {
|
||||
if len(r.WordPressPackages) == 0 {
|
||||
return nil
|
||||
}
|
||||
logging.Log.Infof("Detect WordPress CVE. pkgs: %d ", len(r.WordPressPackages))
|
||||
logging.Log.Infof("%s: Detect WordPress CVE. Number of pkgs: %d ", r.ServerInfo(), len(r.WordPressPackages))
|
||||
n, err := detectWordPressCves(r, wpCnf)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to detect WordPress CVE: %w", err)
|
||||
@@ -281,8 +327,8 @@ func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts
|
||||
}
|
||||
|
||||
for _, d := range ds {
|
||||
nvd, exploits, mitigations := models.ConvertNvdJSONToModel(d.CveID, d.NvdJSON)
|
||||
jvn := models.ConvertJvnToModel(d.CveID, d.Jvn)
|
||||
nvds, exploits, mitigations := models.ConvertNvdToModel(d.CveID, d.Nvds)
|
||||
jvns := models.ConvertJvnToModel(d.CveID, d.Jvns)
|
||||
|
||||
alerts := fillCertAlerts(&d)
|
||||
for cveID, vinfo := range r.ScannedCves {
|
||||
@@ -290,9 +336,23 @@ func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts
|
||||
if vinfo.CveContents == nil {
|
||||
vinfo.CveContents = models.CveContents{}
|
||||
}
|
||||
for _, con := range []*models.CveContent{nvd, jvn} {
|
||||
if con != nil && !con.Empty() {
|
||||
vinfo.CveContents[con.Type] = *con
|
||||
for _, con := range nvds {
|
||||
if !con.Empty() {
|
||||
vinfo.CveContents[con.Type] = []models.CveContent{con}
|
||||
}
|
||||
}
|
||||
for _, con := range jvns {
|
||||
if !con.Empty() {
|
||||
found := false
|
||||
for _, cveCont := range vinfo.CveContents[con.Type] {
|
||||
if con.SourceLink == cveCont.SourceLink {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
vinfo.CveContents[con.Type] = append(vinfo.CveContents[con.Type], con)
|
||||
}
|
||||
}
|
||||
}
|
||||
vinfo.AlertDict = alerts
|
||||
@@ -307,24 +367,26 @@ func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts
|
||||
}
|
||||
|
||||
func fillCertAlerts(cvedetail *cvemodels.CveDetail) (dict models.AlertDict) {
|
||||
if cvedetail.NvdJSON != nil {
|
||||
for _, cert := range cvedetail.NvdJSON.Certs {
|
||||
dict.En = append(dict.En, models.Alert{
|
||||
for _, nvd := range cvedetail.Nvds {
|
||||
for _, cert := range nvd.Certs {
|
||||
dict.USCERT = append(dict.USCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "us",
|
||||
Team: "uscert",
|
||||
})
|
||||
}
|
||||
}
|
||||
if cvedetail.Jvn != nil {
|
||||
for _, cert := range cvedetail.Jvn.Certs {
|
||||
dict.Ja = append(dict.Ja, models.Alert{
|
||||
|
||||
for _, jvn := range cvedetail.Jvns {
|
||||
for _, cert := range jvn.Certs {
|
||||
dict.JPCERT = append(dict.JPCERT, models.Alert{
|
||||
URL: cert.Link,
|
||||
Title: cert.Title,
|
||||
Team: "jp",
|
||||
Team: "jpcert",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return dict
|
||||
}
|
||||
|
||||
@@ -334,6 +396,9 @@ func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult) erro
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ovalClient == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Check if oval fetched: %s %s", r.Family, r.Release)
|
||||
ok, err := ovalClient.CheckIfOvalFetched(r.Family, r.Release)
|
||||
@@ -341,7 +406,12 @@ func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult) erro
|
||||
return err
|
||||
}
|
||||
if !ok {
|
||||
return xerrors.Errorf("OVAL entries of %s %s are not found. Fetch OVAL before reporting. For details, see `https://github.com/kotakanbe/goval-dictionary#usage`", r.Family, r.Release)
|
||||
if r.Family == constant.Debian {
|
||||
logging.Log.Infof("Skip OVAL and Scan with gost alone.")
|
||||
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), 0)
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("OVAL entries of %s %s are not found. Fetch OVAL before reporting. For details, see `https://github.com/vulsio/goval-dictionary#usage`", r.Family, r.Release)
|
||||
}
|
||||
|
||||
logging.Log.Debugf("Check if oval fresh: %s %s", r.Family, r.Release)
|
||||
@@ -366,17 +436,32 @@ func detectPkgsCvesWithGost(cnf config.GostConf, r *models.ScanResult) error {
|
||||
return xerrors.Errorf("Failed to new a gost client: %w", err)
|
||||
}
|
||||
|
||||
nCVEs, err := client.DetectUnfixed(r, true)
|
||||
defer func() {
|
||||
if err := client.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close the gost DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
nCVEs, err := client.DetectCVEs(r, true)
|
||||
if err != nil {
|
||||
if r.Family == constant.Debian {
|
||||
return xerrors.Errorf("Failed to detect CVEs with gost: %w", err)
|
||||
}
|
||||
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
|
||||
}
|
||||
|
||||
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost", r.FormatServerName(), nCVEs)
|
||||
if r.Family == constant.Debian {
|
||||
logging.Log.Infof("%s: %d CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
} else {
|
||||
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost",
|
||||
r.FormatServerName(), nCVEs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectCpeURIsCves detects CVEs of given CPE-URIs
|
||||
func DetectCpeURIsCves(r *models.ScanResult, cpeURIs []string, cnf config.GoCveDictConf, logOpts logging.LogOpts) error {
|
||||
func DetectCpeURIsCves(r *models.ScanResult, cpes []Cpe, cnf config.GoCveDictConf, logOpts logging.LogOpts) error {
|
||||
client, err := newGoCveDictClient(&cnf, logOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -388,23 +473,34 @@ func DetectCpeURIsCves(r *models.ScanResult, cpeURIs []string, cnf config.GoCveD
|
||||
}()
|
||||
|
||||
nCVEs := 0
|
||||
for _, name := range cpeURIs {
|
||||
details, err := client.fetchCveDetailsByCpeName(name)
|
||||
for _, cpe := range cpes {
|
||||
details, err := client.detectCveByCpeURI(cpe.CpeURI, cpe.UseJVN)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, detail := range details {
|
||||
advisories := []models.DistroAdvisory{}
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
for _, jvn := range detail.Jvns {
|
||||
advisories = append(advisories, models.DistroAdvisory{
|
||||
AdvisoryID: jvn.JvnID,
|
||||
})
|
||||
}
|
||||
}
|
||||
maxConfidence := getMaxConfidence(detail)
|
||||
|
||||
if val, ok := r.ScannedCves[detail.CveID]; ok {
|
||||
names := val.CpeURIs
|
||||
names = util.AppendIfMissing(names, name)
|
||||
val.CpeURIs = names
|
||||
val.Confidences.AppendIfMissing(models.CpeNameMatch)
|
||||
val.CpeURIs = util.AppendIfMissing(val.CpeURIs, cpe.CpeURI)
|
||||
val.Confidences.AppendIfMissing(maxConfidence)
|
||||
val.DistroAdvisories = advisories
|
||||
r.ScannedCves[detail.CveID] = val
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
CveID: detail.CveID,
|
||||
CpeURIs: []string{name},
|
||||
Confidences: models.Confidences{models.CpeNameMatch},
|
||||
CveID: detail.CveID,
|
||||
CpeURIs: []string{cpe.CpeURI},
|
||||
Confidences: models.Confidences{maxConfidence},
|
||||
DistroAdvisories: advisories,
|
||||
}
|
||||
r.ScannedCves[detail.CveID] = v
|
||||
nCVEs++
|
||||
@@ -415,15 +511,39 @@ func DetectCpeURIsCves(r *models.ScanResult, cpeURIs []string, cnf config.GoCveD
|
||||
return nil
|
||||
}
|
||||
|
||||
func getMaxConfidence(detail cvemodels.CveDetail) (max models.Confidence) {
|
||||
if !detail.HasNvd() && detail.HasJvn() {
|
||||
return models.JvnVendorProductMatch
|
||||
} else if detail.HasNvd() {
|
||||
for _, nvd := range detail.Nvds {
|
||||
confidence := models.Confidence{}
|
||||
switch nvd.DetectionMethod {
|
||||
case cvemodels.NvdExactVersionMatch:
|
||||
confidence = models.NvdExactVersionMatch
|
||||
case cvemodels.NvdRoughVersionMatch:
|
||||
confidence = models.NvdRoughVersionMatch
|
||||
case cvemodels.NvdVendorProductMatch:
|
||||
confidence = models.NvdVendorProductMatch
|
||||
}
|
||||
if max.Score < confidence.Score {
|
||||
max = confidence
|
||||
}
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
// FillCweDict fills CWE
|
||||
func FillCweDict(r *models.ScanResult) {
|
||||
uniqCweIDMap := map[string]bool{}
|
||||
for _, vinfo := range r.ScannedCves {
|
||||
for _, cont := range vinfo.CveContents {
|
||||
for _, id := range cont.CweIDs {
|
||||
if strings.HasPrefix(id, "CWE-") {
|
||||
id = strings.TrimPrefix(id, "CWE-")
|
||||
uniqCweIDMap[id] = true
|
||||
for _, conts := range vinfo.CveContents {
|
||||
for _, cont := range conts {
|
||||
for _, id := range cont.CweIDs {
|
||||
if strings.HasPrefix(id, "CWE-") {
|
||||
id = strings.TrimPrefix(id, "CWE-")
|
||||
uniqCweIDMap[id] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
90
detector/detector_test.go
Normal file
@@ -0,0 +1,90 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
cvemodels "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
func Test_getMaxConfidence(t *testing.T) {
|
||||
type args struct {
|
||||
detail cvemodels.CveDetail
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
wantMax models.Confidence
|
||||
}{
|
||||
{
|
||||
name: "JvnVendorProductMatch",
|
||||
args: args{
|
||||
detail: cvemodels.CveDetail{
|
||||
Nvds: []cvemodels.Nvd{},
|
||||
Jvns: []cvemodels.Jvn{{}},
|
||||
},
|
||||
},
|
||||
wantMax: models.JvnVendorProductMatch,
|
||||
},
|
||||
{
|
||||
name: "NvdExactVersionMatch",
|
||||
args: args{
|
||||
detail: cvemodels.CveDetail{
|
||||
Nvds: []cvemodels.Nvd{
|
||||
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
|
||||
{DetectionMethod: cvemodels.NvdVendorProductMatch},
|
||||
{DetectionMethod: cvemodels.NvdExactVersionMatch},
|
||||
},
|
||||
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
|
||||
},
|
||||
},
|
||||
wantMax: models.NvdExactVersionMatch,
|
||||
},
|
||||
{
|
||||
name: "NvdRoughVersionMatch",
|
||||
args: args{
|
||||
detail: cvemodels.CveDetail{
|
||||
Nvds: []cvemodels.Nvd{
|
||||
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
|
||||
{DetectionMethod: cvemodels.NvdVendorProductMatch},
|
||||
},
|
||||
Jvns: []cvemodels.Jvn{},
|
||||
},
|
||||
},
|
||||
wantMax: models.NvdRoughVersionMatch,
|
||||
},
|
||||
{
|
||||
name: "NvdVendorProductMatch",
|
||||
args: args{
|
||||
detail: cvemodels.CveDetail{
|
||||
Nvds: []cvemodels.Nvd{
|
||||
{DetectionMethod: cvemodels.NvdVendorProductMatch},
|
||||
},
|
||||
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
|
||||
},
|
||||
},
|
||||
wantMax: models.NvdVendorProductMatch,
|
||||
},
|
||||
{
|
||||
name: "empty",
|
||||
args: args{
|
||||
detail: cvemodels.CveDetail{
|
||||
Nvds: []cvemodels.Nvd{},
|
||||
Jvns: []cvemodels.Jvn{},
|
||||
},
|
||||
},
|
||||
wantMax: models.Confidence{},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if gotMax := getMaxConfidence(tt.args.detail); !reflect.DeepEqual(gotMax, tt.wantMax) {
|
||||
t.Errorf("getMaxConfidence() = %v, want %v", gotMax, tt.wantMax)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
@@ -27,16 +28,16 @@ func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf) (nExploitCve
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, _ := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
responses, err := getCvesViaHTTP(cveIDs, prefix)
|
||||
responses, err := getExploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
exps := []*exploitmodels.Exploit{}
|
||||
exps := []exploitmodels.Exploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
exploits := ConvertToModels(exps)
|
||||
exploits := ConvertToModelsExploit(exps)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Exploits = exploits
|
||||
@@ -45,7 +46,6 @@ func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf) (nExploitCve
|
||||
nExploitCve++
|
||||
}
|
||||
} else {
|
||||
|
||||
driver, locked, err := newExploitDB(&cnf)
|
||||
if locked {
|
||||
return 0, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
@@ -62,11 +62,14 @@ func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf) (nExploitCve
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
es := driver.GetExploitByCveID(cveID)
|
||||
es, err := driver.GetExploitByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if len(es) == 0 {
|
||||
continue
|
||||
}
|
||||
exploits := ConvertToModels(es)
|
||||
exploits := ConvertToModelsExploit(es)
|
||||
vuln.Exploits = exploits
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nExploitCve++
|
||||
@@ -75,8 +78,8 @@ func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf) (nExploitCve
|
||||
return nExploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModels converts gost model to vuls model
|
||||
func ConvertToModels(es []*exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
// ConvertToModelsExploit converts exploit model to vuls model
|
||||
func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
for _, e := range es {
|
||||
var documentURL, shellURL *string
|
||||
if e.OffensiveSecurity != nil {
|
||||
@@ -102,14 +105,14 @@ func ConvertToModels(es []*exploitmodels.Exploit) (exploits []models.Exploit) {
|
||||
}
|
||||
|
||||
type exploitResponse struct {
|
||||
request request
|
||||
request exploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
func getExploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []exploitResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan request, nReq)
|
||||
reqChan := make(chan exploitRequest, nReq)
|
||||
resChan := make(chan exploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
@@ -118,7 +121,7 @@ func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- request{
|
||||
reqChan <- exploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
@@ -128,18 +131,16 @@ func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
select {
|
||||
case req := <-reqChan:
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGet(url, req, resChan, errChan)
|
||||
}
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetExploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -153,23 +154,20 @@ func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching OVAL")
|
||||
return nil, xerrors.New("Timeout Fetching Exploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
return nil, xerrors.Errorf("Failed to fetch Exploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type request struct {
|
||||
osMajorVersion string
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cveID string
|
||||
type exploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGet(url string, req request, resChan chan<- exploitResponse, errChan chan<- error) {
|
||||
func httpGetExploit(url string, req exploitRequest, resChan chan<- exploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
@@ -213,7 +211,7 @@ func newExploitDB(cnf config.VulnDictInterface) (driver exploitdb.DB, locked boo
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = exploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL()); err != nil {
|
||||
if driver, locked, err = exploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), exploitdb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("exploitDB is locked. err: %w", err)
|
||||
}
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
@@ -123,7 +126,7 @@ func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string,
|
||||
|
||||
if val, ok := r.ScannedCves[cveID]; ok {
|
||||
val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
|
||||
val.CveContents[models.GitHub] = cveContent
|
||||
val.CveContents[models.GitHub] = []models.CveContent{cveContent}
|
||||
r.ScannedCves[cveID] = val
|
||||
} else {
|
||||
v := models.VulnInfo{
|
||||
|
||||
214
detector/kevuln.go
Normal file
@@ -0,0 +1,214 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
kevulndb "github.com/vulsio/go-kev/db"
|
||||
kevulnmodels "github.com/vulsio/go-kev/models"
|
||||
)
|
||||
|
||||
// FillWithKEVuln :
|
||||
func FillWithKEVuln(r *models.ScanResult, cnf config.KEVulnConf) error {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
responses, err := getKEVulnsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, res := range responses {
|
||||
kevulns := []kevulnmodels.KEVuln{}
|
||||
if err := json.Unmarshal([]byte(res.json), &kevulns); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.AlertDict.CISA = alerts
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
}
|
||||
} else {
|
||||
driver, locked, err := newKEVulnDB(&cnf)
|
||||
if locked {
|
||||
return xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
kevulns, err := driver.GetKEVulnByCveID(cveID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(kevulns) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
alerts := []models.Alert{}
|
||||
if len(kevulns) > 0 {
|
||||
alerts = append(alerts, models.Alert{
|
||||
Title: "Known Exploited Vulnerabilities Catalog",
|
||||
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
|
||||
Team: "cisa",
|
||||
})
|
||||
}
|
||||
|
||||
vuln.AlertDict.CISA = alerts
|
||||
r.ScannedCves[cveID] = vuln
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type kevulnResponse struct {
|
||||
request kevulnRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getKEVulnsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []kevulnResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan kevulnRequest, nReq)
|
||||
resChan := make(chan kevulnResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- kevulnRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetKEVuln(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching KEVuln")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch KEVuln. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type kevulnRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetKEVuln(url string, req kevulnRequest, resChan chan<- kevulnResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- kevulnResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
func newKEVulnDB(cnf config.VulnDictInterface) (driver kevulndb.DB, locked bool, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, false, nil
|
||||
}
|
||||
path := cnf.GetURL()
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = kevulndb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), kevulndb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("kevulnDB is locked. err: %w", err)
|
||||
}
|
||||
return nil, false, err
|
||||
}
|
||||
return driver, false, nil
|
||||
}
|
||||
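For orientation, the net effect of FillWithKEVuln above is that any scanned CVE found in the CISA Known Exploited Vulnerabilities catalog gets an alert appended under AlertDict.CISA. A small hypothetical helper (not part of this change set), built only on the fields shown above, could surface those CVEs in a report:

package main

import (
	"fmt"

	"github.com/future-architect/vuls/models"
)

// knownExploited returns the CVE IDs that FillWithKEVuln tagged with a
// CISA Known Exploited Vulnerabilities alert (AlertDict.CISA above).
func knownExploited(r models.ScanResult) []string {
	ids := []string{}
	for cveID, vinfo := range r.ScannedCves {
		if len(vinfo.AlertDict.CISA) > 0 {
			ids = append(ids, cveID)
		}
	}
	return ids
}

func main() {
	// Empty result just to keep the sketch runnable; a real ScanResult comes from detector.Detect.
	r := models.ScanResult{ScannedCves: models.VulnInfos{}}
	fmt.Println(knownExploited(r))
}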
@@ -1,16 +1,16 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
db2 "github.com/aquasecurity/trivy-db/pkg/db"
|
||||
trivydb "github.com/aquasecurity/trivy-db/pkg/db"
|
||||
"github.com/aquasecurity/trivy-db/pkg/metadata"
|
||||
"github.com/aquasecurity/trivy/pkg/db"
|
||||
"github.com/aquasecurity/trivy/pkg/github"
|
||||
"github.com/aquasecurity/trivy/pkg/indicator"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
"github.com/spf13/afero"
|
||||
"golang.org/x/xerrors"
|
||||
"k8s.io/utils/clock"
|
||||
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
@@ -30,14 +30,14 @@ func DetectLibsCves(r *models.ScanResult, cacheDir string, noProgress bool) (err
|
||||
}
|
||||
|
||||
logging.Log.Info("Updating library db...")
|
||||
if err := downloadDB("", cacheDir, noProgress, false, false); err != nil {
|
||||
if err := downloadDB("", cacheDir, noProgress, false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := db2.Init(cacheDir); err != nil {
|
||||
if err := trivydb.Init(cacheDir); err != nil {
|
||||
return err
|
||||
}
|
||||
defer db2.Close()
|
||||
defer trivydb.Close()
|
||||
|
||||
for _, lib := range r.LibraryScanners {
|
||||
vinfos, err := lib.Scan()
|
||||
@@ -62,10 +62,10 @@ func DetectLibsCves(r *models.ScanResult, cacheDir string, noProgress bool) (err
|
||||
return nil
|
||||
}
|
||||
|
||||
func downloadDB(appVersion, cacheDir string, quiet, light, skipUpdate bool) error {
|
||||
client := initializeDBClient(cacheDir, quiet)
|
||||
func downloadDB(appVersion, cacheDir string, quiet, skipUpdate bool) error {
|
||||
client := db.NewClient(cacheDir, quiet)
|
||||
ctx := context.Background()
|
||||
needsUpdate, err := client.NeedsUpdate(appVersion, light, skipUpdate)
|
||||
needsUpdate, err := client.NeedsUpdate(appVersion, skipUpdate)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("database error: %w", err)
|
||||
}
|
||||
@@ -73,12 +73,9 @@ func downloadDB(appVersion, cacheDir string, quiet, light, skipUpdate bool) erro
|
||||
if needsUpdate {
|
||||
logging.Log.Info("Need to update DB")
|
||||
logging.Log.Info("Downloading DB...")
|
||||
if err := client.Download(ctx, cacheDir, light); err != nil {
|
||||
if err := client.Download(ctx, cacheDir); err != nil {
|
||||
return xerrors.Errorf("failed to download vulnerability DB: %w", err)
|
||||
}
|
||||
if err = client.UpdateMetadata(cacheDir); err != nil {
|
||||
return xerrors.Errorf("unable to update database metadata: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// for debug
|
||||
@@ -88,24 +85,13 @@ func downloadDB(appVersion, cacheDir string, quiet, light, skipUpdate bool) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func initializeDBClient(cacheDir string, quiet bool) db.Client {
|
||||
config := db2.Config{}
|
||||
client := github.NewClient()
|
||||
progressBar := indicator.NewProgressBar(quiet)
|
||||
realClock := clock.RealClock{}
|
||||
fs := afero.NewOsFs()
|
||||
metadata := db.NewMetadata(fs, cacheDir)
|
||||
dbClient := db.NewClient(config, client, progressBar, realClock, metadata)
|
||||
return dbClient
|
||||
}
|
||||
|
||||
func showDBInfo(cacheDir string) error {
|
||||
m := db.NewMetadata(afero.NewOsFs(), cacheDir)
|
||||
metadata, err := m.Get()
|
||||
m := metadata.NewClient(cacheDir)
|
||||
meta, err := m.Get()
|
||||
if err != nil {
|
||||
return xerrors.Errorf("something wrong with DB: %w", err)
|
||||
}
|
||||
logging.Log.Debugf("DB Schema: %d, Type: %d, UpdatedAt: %s, NextUpdate: %s",
|
||||
metadata.Version, metadata.Type, metadata.UpdatedAt, metadata.NextUpdate)
|
||||
log.Logger.Debugf("DB Schema: %d, UpdatedAt: %s, NextUpdate: %s, DownloadedAt: %s",
|
||||
meta.Version, meta.UpdatedAt, meta.NextUpdate, meta.DownloadedAt)
|
||||
return nil
|
||||
}
|
||||
|
||||
196
detector/msf.go
@@ -1,50 +1,186 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
metasploitdb "github.com/takuzoo3868/go-msfdb/db"
|
||||
metasploitmodels "github.com/takuzoo3868/go-msfdb/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
metasploitdb "github.com/vulsio/go-msfdb/db"
|
||||
metasploitmodels "github.com/vulsio/go-msfdb/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// FillWithMetasploit fills Metasploit module information for each detected CVE
|
||||
func FillWithMetasploit(r *models.ScanResult, cnf config.MetasploitConf) (nMetasploitCve int, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
var cveIDs []string
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
prefix, err := util.URLPathJoin(cnf.GetURL(), "cves")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
responses, err := getMetasploitsViaHTTP(cveIDs, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, res := range responses {
|
||||
msfs := []metasploitmodels.Metasploit{}
|
||||
if err := json.Unmarshal([]byte(res.json), &msfs); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
metasploits := ConvertToModelsMsf(msfs)
|
||||
v, ok := r.ScannedCves[res.request.cveID]
|
||||
if ok {
|
||||
v.Metasploits = metasploits
|
||||
}
|
||||
r.ScannedCves[res.request.cveID] = v
|
||||
nMetasploitCve++
|
||||
}
|
||||
} else {
|
||||
driver, locked, err := newMetasploitDB(&cnf)
|
||||
if locked {
|
||||
return 0, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
driver, locked, err := newMetasploitDB(&cnf)
|
||||
if locked {
|
||||
return 0, xerrors.Errorf("SQLite3 is locked: %s", cnf.GetSQLite3Path())
|
||||
} else if err != nil {
|
||||
return 0, err
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
ms, err := driver.GetModuleByCveID(cveID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if len(ms) == 0 {
|
||||
continue
|
||||
}
|
||||
modules := ConvertToModelsMsf(ms)
|
||||
vuln.Metasploits = modules
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nMetasploitCve++
|
||||
}
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
}
|
||||
}()
|
||||
|
||||
for cveID, vuln := range r.ScannedCves {
|
||||
if cveID == "" {
|
||||
continue
|
||||
}
|
||||
ms := driver.GetModuleByCveID(cveID)
|
||||
if len(ms) == 0 {
|
||||
continue
|
||||
}
|
||||
modules := ConvertToModelsMsf(ms)
|
||||
vuln.Metasploits = modules
|
||||
r.ScannedCves[cveID] = vuln
|
||||
nMetasploitCve++
|
||||
}
|
||||
|
||||
return nMetasploitCve, nil
|
||||
}
|
||||
|
||||
// ConvertToModelsMsf converts gost model to vuls model
|
||||
func ConvertToModelsMsf(ms []*metasploitmodels.Metasploit) (modules []models.Metasploit) {
|
||||
type metasploitResponse struct {
|
||||
request metasploitRequest
|
||||
json string
|
||||
}
|
||||
|
||||
func getMetasploitsViaHTTP(cveIDs []string, urlPrefix string) (
|
||||
responses []metasploitResponse, err error) {
|
||||
nReq := len(cveIDs)
|
||||
reqChan := make(chan metasploitRequest, nReq)
|
||||
resChan := make(chan metasploitResponse, nReq)
|
||||
errChan := make(chan error, nReq)
|
||||
defer close(reqChan)
|
||||
defer close(resChan)
|
||||
defer close(errChan)
|
||||
|
||||
go func() {
|
||||
for _, cveID := range cveIDs {
|
||||
reqChan <- metasploitRequest{
|
||||
cveID: cveID,
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
tasks <- func() {
|
||||
req := <-reqChan
|
||||
url, err := util.URLPathJoin(
|
||||
urlPrefix,
|
||||
req.cveID,
|
||||
)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
} else {
|
||||
logging.Log.Debugf("HTTP Request to %s", url)
|
||||
httpGetMetasploit(url, req, resChan, errChan)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
timeout := time.After(2 * 60 * time.Second)
|
||||
var errs []error
|
||||
for i := 0; i < nReq; i++ {
|
||||
select {
|
||||
case res := <-resChan:
|
||||
responses = append(responses, res)
|
||||
case err := <-errChan:
|
||||
errs = append(errs, err)
|
||||
case <-timeout:
|
||||
return nil, xerrors.New("Timeout Fetching Metasploit")
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return nil, xerrors.Errorf("Failed to fetch Metasploit. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type metasploitRequest struct {
|
||||
cveID string
|
||||
}
|
||||
|
||||
func httpGetMetasploit(url string, req metasploitRequest, resChan chan<- metasploitResponse, errChan chan<- error) {
|
||||
var body string
|
||||
var errs []error
|
||||
var resp *http.Response
|
||||
count, retryMax := 0, 3
|
||||
f := func() (err error) {
|
||||
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
|
||||
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
count++
|
||||
if count == retryMax {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
notify := func(err error, t time.Duration) {
|
||||
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
|
||||
}
|
||||
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
|
||||
if err != nil {
|
||||
errChan <- xerrors.Errorf("HTTP Error %w", err)
|
||||
return
|
||||
}
|
||||
if count == retryMax {
|
||||
errChan <- xerrors.New("Retry count exceeded")
|
||||
return
|
||||
}
|
||||
|
||||
resChan <- metasploitResponse{
|
||||
request: req,
|
||||
json: body,
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertToModelsMsf converts metasploit model to vuls model
|
||||
func ConvertToModelsMsf(ms []metasploitmodels.Metasploit) (modules []models.Metasploit) {
|
||||
for _, m := range ms {
|
||||
var links []string
|
||||
if 0 < len(m.References) {
|
||||
@@ -71,7 +207,7 @@ func newMetasploitDB(cnf config.VulnDictInterface) (driver metasploitdb.DB, lock
|
||||
if cnf.GetType() == "sqlite3" {
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
if driver, locked, err = metasploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), false); err != nil {
|
||||
if driver, locked, err = metasploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), metasploitdb.Option{}); err != nil {
|
||||
if locked {
|
||||
return nil, true, xerrors.Errorf("metasploitDB is locked. err: %w", err)
|
||||
}
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package detector
|
||||
|
||||
import (
|
||||
@@ -6,6 +9,7 @@ import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"time"
|
||||
@@ -22,10 +26,7 @@ func reuseScannedCves(r *models.ScanResult) bool {
|
||||
case constant.FreeBSD, constant.Raspbian:
|
||||
return true
|
||||
}
|
||||
if isTrivyResult(r) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
return isTrivyResult(r)
|
||||
}
|
||||
|
||||
func isTrivyResult(r *models.ScanResult) bool {
|
||||
@@ -64,10 +65,9 @@ func loadPrevious(currs models.ScanResults, resultsDir string) (prevs models.Sca
|
||||
prevs = append(prevs, *r)
|
||||
logging.Log.Infof("Previous json found: %s", path)
|
||||
break
|
||||
} else {
|
||||
logging.Log.Infof("Previous json is different family.Release: %s, pre: %s.%s cur: %s.%s",
|
||||
path, r.Family, r.Release, result.Family, result.Release)
|
||||
}
|
||||
logging.Log.Infof("Previous json is different family.Release: %s, pre: %s.%s cur: %s.%s",
|
||||
path, r.Family, r.Release, result.Family, result.Release)
|
||||
}
|
||||
}
|
||||
return prevs, nil
|
||||
@@ -141,7 +141,7 @@ func getPlusDiffCves(previous, current models.ScanResult) models.VulnInfos {
|
||||
|
||||
// TODO commented out because a bug of diff logic when multiple oval defs found for a certain CVE-ID and same updated_at
|
||||
// if these OVAL defs have different affected packages, this logic detects as updated.
|
||||
// This logic will be uncomented after integration with gost https://github.com/knqyf263/gost
|
||||
// This logic will be uncomented after integration with gost https://github.com/vulsio/gost
|
||||
// } else if isCveFixed(v, previous) {
|
||||
// updated[v.CveID] = v
|
||||
// logging.Log.Debugf("fixed: %s", v.CveID)
|
@@ -194,30 +194,34 @@ func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
		models.NewCveContentType(current.Family),
	}

	prevLastModified := map[models.CveContentType]time.Time{}
	prevLastModified := map[models.CveContentType][]time.Time{}
	preVinfo, ok := previous.ScannedCves[cveID]
	if !ok {
		return true
	}
	for _, cType := range cTypes {
		if content, ok := preVinfo.CveContents[cType]; ok {
			prevLastModified[cType] = content.LastModified
		if conts, ok := preVinfo.CveContents[cType]; ok {
			for _, cont := range conts {
				prevLastModified[cType] = append(prevLastModified[cType], cont.LastModified)
			}
		}
	}

	curLastModified := map[models.CveContentType]time.Time{}
	curLastModified := map[models.CveContentType][]time.Time{}
	curVinfo, ok := current.ScannedCves[cveID]
	if !ok {
		return true
	}
	for _, cType := range cTypes {
		if content, ok := curVinfo.CveContents[cType]; ok {
			curLastModified[cType] = content.LastModified
		if conts, ok := curVinfo.CveContents[cType]; ok {
			for _, cont := range conts {
				curLastModified[cType] = append(curLastModified[cType], cont.LastModified)
			}
		}
	}

	for _, t := range cTypes {
		if !curLastModified[t].Equal(prevLastModified[t]) {
		if !reflect.DeepEqual(curLastModified[t], prevLastModified[t]) {
			logging.Log.Debugf("%s LastModified not equal: \n%s\n%s",
				cveID, curLastModified[t], prevLastModified[t])
			return true

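Because `CveContents` now maps each content type to a slice (see the models change near the end of this diff), the per-type `LastModified` values become `[]time.Time`, and the comparison switches from `time.Time.Equal` to `reflect.DeepEqual`. A small sketch of the new comparison shape, with plain string keys standing in for `models.CveContentType`:

```go
package example

import (
	"reflect"
	"time"
)

// lastModifiedChanged mirrors the updated comparison above: each content type
// now carries a slice of LastModified timestamps, compared wholesale.
func lastModifiedChanged(prev, cur map[string][]time.Time, cTypes []string) bool {
	for _, t := range cTypes {
		if !reflect.DeepEqual(cur[t], prev[t]) {
			return true
		}
	}
	return false
}
```

Worth noting: `reflect.DeepEqual` compares `time.Time` values structurally rather than via `Equal`, so two times that differ only in location or monotonic reading count as changed.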
@@ -1,3 +1,6 @@
//go:build !scanner
// +build !scanner

package detector

import (

@@ -1,3 +1,6 @@
//go:build !scanner
// +build !scanner

package detector

import (

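Throughout this diff, detector and gost files gain the `//go:build` constraint syntax next to the legacy `// +build` comment, matching the `go 1.17` bump in go.mod below. A minimal sketch of the dual form; the `scanner` tag itself is this repository's convention and presumably selects a scanner-only build:

```go
//go:build !scanner
// +build !scanner

// Files guarded this way compile only when the "scanner" build tag is NOT set:
// a plain `go build ./...` includes them, while `go build -tags scanner ./...` skips them.
package detector
```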
170 go.mod
@@ -1,24 +1,34 @@
|
||||
module github.com/future-architect/vuls
|
||||
|
||||
go 1.16
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v50.2.0+incompatible
|
||||
github.com/BurntSushi/toml v0.3.1
|
||||
github.com/aquasecurity/fanal v0.0.0-20210119051230-28c249da7cfd
|
||||
github.com/aquasecurity/trivy v0.16.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20210121143430-2a5c54036a86
|
||||
github.com/Azure/azure-sdk-for-go v61.2.0+incompatible
|
||||
github.com/BurntSushi/toml v1.0.0
|
||||
github.com/Ullaakut/nmap/v2 v2.1.2-0.20210406060955-59a52fe80a4f
|
||||
github.com/VividCortex/ewma v1.2.0 // indirect
|
||||
github.com/aquasecurity/fanal v0.0.0-20220129174924-b9e05fcccc57
|
||||
github.com/aquasecurity/go-dep-parser v0.0.0-20220110153540-4a30ebc4b509
|
||||
github.com/aquasecurity/trivy v0.23.0
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20220130223604-df65ebde46f4
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
|
||||
github.com/aws/aws-sdk-go v1.36.31
|
||||
github.com/aws/aws-sdk-go v1.42.30
|
||||
github.com/boltdb/bolt v1.3.1
|
||||
github.com/briandowns/spinner v1.16.0 // indirect
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/cheggaaa/pb/v3 v3.0.8 // indirect
|
||||
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
|
||||
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
|
||||
github.com/emersion/go-smtp v0.14.0
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.1 // indirect
|
||||
github.com/go-redis/redis/v8 v8.11.4 // indirect
|
||||
github.com/go-stack/stack v1.8.1 // indirect
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/gosuri/uitable v0.0.4
|
||||
github.com/hashicorp/go-uuid v1.0.2
|
||||
github.com/hashicorp/go-version v1.2.1
|
||||
github.com/hashicorp/go-version v1.3.0
|
||||
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c
|
||||
github.com/jesseduffield/gocui v0.3.0
|
||||
github.com/k0kubun/pp v3.0.1+incompatible
|
||||
@@ -26,35 +36,137 @@ require (
|
||||
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
|
||||
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
|
||||
github.com/knqyf263/go-rpm-version v0.0.0-20170716094938-74609b86c936
|
||||
github.com/knqyf263/gost v0.1.10
|
||||
github.com/kotakanbe/go-cve-dictionary v0.5.10
|
||||
github.com/kotakanbe/go-pingscanner v0.1.0
|
||||
github.com/kotakanbe/goval-dictionary v0.3.3
|
||||
github.com/kotakanbe/logrus-prefixed-formatter v0.0.0-20180123152602-928f7356cb96
|
||||
github.com/lib/pq v1.10.0 // indirect
|
||||
github.com/magiconair/properties v1.8.4 // indirect
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.13 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/mitchellh/mapstructure v1.4.1 // indirect
|
||||
github.com/nlopes/slack v0.6.0
|
||||
github.com/nsf/termbox-go v0.0.0-20200418040025-38ba6e5628f1 // indirect
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/parnurzeal/gorequest v0.2.16
|
||||
github.com/pelletier/go-toml v1.8.1 // indirect
|
||||
github.com/pelletier/go-toml v1.9.4 // indirect
|
||||
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
|
||||
github.com/sirupsen/logrus v1.7.0
|
||||
github.com/spf13/afero v1.6.0
|
||||
github.com/spf13/cast v1.3.1 // indirect
|
||||
github.com/spf13/cobra v1.1.3
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/takuzoo3868/go-msfdb v0.1.5
|
||||
github.com/vulsio/go-exploitdb v0.1.7
|
||||
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 // indirect
|
||||
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect
|
||||
golang.org/x/net v0.0.0-20210323141857-08027d57d8cf // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20210125201302-af13f521f196
|
||||
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 // indirect
|
||||
github.com/sirupsen/logrus v1.8.1
|
||||
github.com/spf13/cast v1.4.1 // indirect
|
||||
github.com/spf13/cobra v1.3.0
|
||||
github.com/vulsio/go-cve-dictionary v0.8.2-0.20211028094424-0a854f8e8f85
|
||||
github.com/vulsio/go-exploitdb v0.4.2-0.20211028071949-1ebf9c4f6c4d
|
||||
github.com/vulsio/go-kev v0.1.0
|
||||
github.com/vulsio/go-msfdb v0.2.1-0.20211028071756-4a9759bd9f14
|
||||
github.com/vulsio/gost v0.4.1-0.20211028071837-7ad032a6ffa8
|
||||
github.com/vulsio/goval-dictionary v0.7.0
|
||||
golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce // indirect
|
||||
golang.org/x/net v0.0.0-20220114011407-0dd24b26b47d // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
|
||||
gopkg.in/ini.v1 v1.62.0 // indirect
|
||||
k8s.io/utils v0.0.0-20210111153108-fddb29f9d009
|
||||
gopkg.in/ini.v1 v1.66.3 // indirect
|
||||
gorm.io/driver/mysql v1.2.3 // indirect
|
||||
gorm.io/driver/postgres v1.2.3 // indirect
|
||||
gorm.io/driver/sqlite v1.2.6 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.99.0 // indirect
|
||||
cloud.google.com/go/storage v1.14.0 // indirect
|
||||
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.24 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.18 // indirect
|
||||
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/logger v0.2.1 // indirect
|
||||
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||
github.com/Masterminds/semver v1.5.0 // indirect
|
||||
github.com/Masterminds/sprig v2.22.0+incompatible // indirect
|
||||
github.com/PuerkitoBio/goquery v1.6.1 // indirect
|
||||
github.com/andybalholm/cascadia v1.2.0 // indirect
|
||||
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
|
||||
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
|
||||
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect
|
||||
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
|
||||
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
|
||||
github.com/caarlos0/env/v6 v6.0.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/docker/cli v20.10.11+incompatible // indirect
|
||||
github.com/docker/distribution v2.7.1+incompatible // indirect
|
||||
github.com/docker/docker v20.10.12+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.6.4 // indirect
|
||||
github.com/go-sql-driver/mysql v1.6.0 // indirect
|
||||
github.com/gofrs/uuid v4.0.0+incompatible // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/go-containerregistry v0.7.1-0.20211214010025-a65b7844a475 // indirect
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.1.1 // indirect
|
||||
github.com/gorilla/websocket v1.4.2 // indirect
|
||||
github.com/grokify/html-strip-tags-go v0.0.1 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-getter v1.5.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.0 // indirect
|
||||
github.com/hashicorp/go-safetemp v1.0.0 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/htcat/htcat v1.0.2 // indirect
|
||||
github.com/huandu/xstrings v1.3.2 // indirect
|
||||
github.com/imdario/mergo v0.3.12 // indirect
|
||||
github.com/inconshreveable/log15 v0.0.0-20201112154412-8562bdadbbac // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.11.0 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.2.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
|
||||
github.com/jackc/pgtype v1.10.0 // indirect
|
||||
github.com/jackc/pgx/v4 v4.15.0 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.4 // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/klauspost/compress v1.13.6 // indirect
|
||||
github.com/magiconair/properties v1.8.5 // indirect
|
||||
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
|
||||
github.com/mattn/go-colorable v0.1.12 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.11 // indirect
|
||||
github.com/mitchellh/copystructure v1.1.1 // indirect
|
||||
github.com/mitchellh/go-testing-interface v1.0.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.4.3 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.1 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5 // indirect
|
||||
github.com/owenrumney/go-sarif/v2 v2.0.17 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/spf13/afero v1.8.1 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/spf13/viper v1.10.0 // indirect
|
||||
github.com/stretchr/objx v0.3.0 // indirect
|
||||
github.com/stretchr/testify v1.7.0 // indirect
|
||||
github.com/subosito/gotenv v1.2.0 // indirect
|
||||
github.com/ulikunitz/xz v0.5.10 // indirect
|
||||
github.com/ymomoi/goval-parser v0.0.0-20170813122243-0a0be1dd9d08 // indirect
|
||||
go.etcd.io/bbolt v1.3.6 // indirect
|
||||
go.opencensus.io v0.23.0 // indirect
|
||||
go.uber.org/atomic v1.7.0 // indirect
|
||||
go.uber.org/multierr v1.6.0 // indirect
|
||||
go.uber.org/zap v1.20.0 // indirect
|
||||
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 // indirect
|
||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf // indirect
|
||||
google.golang.org/api v0.62.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect
|
||||
google.golang.org/grpc v1.43.0 // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
|
||||
gorm.io/gorm v1.22.5 // indirect
|
||||
k8s.io/utils v0.0.0-20201110183641-67b214c5f920 // indirect
|
||||
moul.io/http2curl v1.0.0 // indirect
|
||||
)
|
||||
|
||||
180
gost/debian.go
180
gost/debian.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
@@ -5,11 +6,12 @@ package gost
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
debver "github.com/knqyf263/go-deb-version"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// Debian is Gost client for Debian GNU/Linux
|
||||
@@ -21,6 +23,7 @@ type packCves struct {
|
||||
packName string
|
||||
isSrcPack bool
|
||||
cves []models.CveContent
|
||||
fixes models.PackageFixStatuses
|
||||
}
|
||||
|
||||
func (deb Debian) supported(major string) bool {
|
||||
@@ -28,23 +31,23 @@ func (deb Debian) supported(major string) bool {
|
||||
"8": "jessie",
|
||||
"9": "stretch",
|
||||
"10": "buster",
|
||||
"11": "bullseye",
|
||||
}[major]
|
||||
return ok
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
// DetectCVEs fills cve information that has in Gost
|
||||
func (deb Debian) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if !deb.supported(major(r.Release)) {
|
||||
// only logging
|
||||
logging.Log.Warnf("Debian %s is not supported yet", r.Release)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
linuxImage := "linux-image-" + r.RunningKernel.Release
|
||||
// Add linux and set the version of running kernel to search OVAL.
|
||||
// Add linux and set the version of running kernel to search Gost.
|
||||
if r.Container.ContainerID == "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages[linuxImage]; ok {
|
||||
if p, ok := r.Packages["linux-image-"+r.RunningKernel.Release]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
@@ -54,18 +57,35 @@ func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err er
|
||||
}
|
||||
}
|
||||
|
||||
// Debian Security Tracker does not support Package for Raspbian, so skip it.
|
||||
var scanResult models.ScanResult
|
||||
if r.Family != constant.Raspbian {
|
||||
scanResult = *r
|
||||
} else {
|
||||
scanResult = r.RemoveRaspbianPackFromResult()
|
||||
stashLinuxPackage := r.Packages["linux"]
|
||||
nFixedCVEs, err := deb.detectCVEsWithFixState(r, "resolved")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
r.Packages["linux"] = stashLinuxPackage
|
||||
nUnfixedCVEs, err := deb.detectCVEsWithFixState(r, "open")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return (nFixedCVEs + nUnfixedCVEs), nil
|
||||
}
|
||||
|
||||
func (deb Debian) detectCVEsWithFixState(r *models.ScanResult, fixStatus string) (nCVEs int, err error) {
|
||||
if fixStatus != "resolved" && fixStatus != "open" {
|
||||
return 0, xerrors.Errorf(`Failed to detectCVEsWithFixState. fixStatus is not allowed except "open" and "resolved"(actual: fixStatus -> %s).`, fixStatus)
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if deb.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(deb.DBDriver.Cnf.GetURL(), "debian", major(scanResult.Release), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, url)
|
||||
url, _ := util.URLPathJoin(deb.DBDriver.Cnf.GetURL(), "debian", major(r.Release), "pkgs")
|
||||
s := "unfixed-cves"
|
||||
if s == "resolved" {
|
||||
s = "fixed-cves"
|
||||
}
|
||||
|
||||
responses, err := getCvesWithFixStateViaHTTP(r, url, s)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -76,43 +96,46 @@ func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err er
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
fixes := []models.PackageFixStatus{}
|
||||
for _, debcve := range debCves {
|
||||
cves = append(cves, *deb.ConvertToModel(&debcve))
|
||||
fixes = append(fixes, checkPackageFixStatus(&debcve)...)
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if deb.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range scanResult.Packages {
|
||||
cveDebs := deb.DBDriver.DB.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
for _, pack := range r.Packages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range scanResult.SrcPackages {
|
||||
cveDebs := deb.DBDriver.DB.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
|
||||
cves := []models.CveContent{}
|
||||
for _, cveDeb := range cveDebs {
|
||||
cves = append(cves, *deb.ConvertToModel(&cveDeb))
|
||||
for _, pack := range r.SrcPackages {
|
||||
cves, fixes, err := deb.getCvesDebianWithfixStatus(fixStatus, major(r.Release), pack.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
fixes: fixes,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -120,13 +143,14 @@ func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err er
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for _, cve := range p.cves {
|
||||
for i, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.DebianSecurityTracker] = cve
|
||||
v.CveContents[models.DebianSecurityTracker] = []models.CveContent{cve}
|
||||
v.Confidences = models.Confidences{models.DebianSecurityTrackerMatch}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
@@ -134,6 +158,31 @@ func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err er
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
|
||||
}
|
||||
|
||||
if fixStatus == "resolved" {
|
||||
versionRelease := ""
|
||||
if p.isSrcPack {
|
||||
versionRelease = r.SrcPackages[p.packName].Version
|
||||
} else {
|
||||
versionRelease = r.Packages[p.packName].FormatVer()
|
||||
}
|
||||
|
||||
if versionRelease == "" {
|
||||
break
|
||||
}
|
||||
|
||||
affected, err := isGostDefAffected(versionRelease, p.fixes[i].FixedIn)
|
||||
if err != nil {
|
||||
logging.Log.Debugf("Failed to parse versions: %s, Ver: %s, Gost: %s",
|
||||
err, versionRelease, p.fixes[i].FixedIn)
|
||||
continue
|
||||
}
|
||||
|
||||
if !affected {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
@@ -148,25 +197,69 @@ func (deb Debian) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err er
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, linuxImage)
|
||||
names = append(names, "linux-image-"+r.RunningKernel.Release)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
if fixStatus == "resolved" {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixedIn: p.fixes[i].FixedIn,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
|
||||
return nCVEs, nil
|
||||
}
|

func isGostDefAffected(versionRelease, gostVersion string) (affected bool, err error) {
	vera, err := debver.NewVersion(versionRelease)
	if err != nil {
		return false, err
	}
	verb, err := debver.NewVersion(gostVersion)
	if err != nil {
		return false, err
	}
	return vera.LessThan(verb), nil
}

func (deb Debian) getCvesDebianWithfixStatus(fixStatus, release, pkgName string) ([]models.CveContent, []models.PackageFixStatus, error) {
	var f func(string, string) (map[string]gostmodels.DebianCVE, error)
	if fixStatus == "resolved" {
		f = deb.DBDriver.DB.GetFixedCvesDebian
	} else {
		f = deb.DBDriver.DB.GetUnfixedCvesDebian
	}
	debCves, err := f(release, pkgName)
	if err != nil {
		return nil, nil, err
	}

	cves := []models.CveContent{}
	fixes := []models.PackageFixStatus{}
	for _, devbCve := range debCves {
		cves = append(cves, *deb.ConvertToModel(&devbCve))
		fixes = append(fixes, checkPackageFixStatus(&devbCve)...)
	}
	return cves, fixes, nil
}

// ConvertToModel converts gost model to vuls model
func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
	severity := ""
@@ -188,3 +281,22 @@ func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
		},
	}
}

func checkPackageFixStatus(cve *gostmodels.DebianCVE) []models.PackageFixStatus {
	fixes := []models.PackageFixStatus{}
	for _, p := range cve.Package {
		for _, r := range p.Release {
			f := models.PackageFixStatus{Name: p.PackageName}

			if r.Status == "open" {
				f.NotFixedYet = true
			} else {
				f.FixedIn = r.FixedVersion
			}

			fixes = append(fixes, f)
		}
	}

	return fixes
}

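`isGostDefAffected` above decides whether the installed version is older than the fixed version reported by gost, using `knqyf263/go-deb-version` (the `debver` import added at the top of this file). A small sketch of that comparison in isolation; the version strings are illustrative, not taken from this diff:

```go
package main

import (
	"fmt"

	debver "github.com/knqyf263/go-deb-version"
)

func main() {
	installed, err := debver.NewVersion("2.9.10+dfsg-6.7+deb11u1")
	if err != nil {
		panic(err)
	}
	fixed, err := debver.NewVersion("2.9.10+dfsg-6.7+deb11u2")
	if err != nil {
		panic(err)
	}
	// LessThan follows Debian version ordering (epoch, upstream version, revision),
	// so this reports that the installed package is still affected.
	fmt.Println(installed.LessThan(fixed)) // true
}
```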
@@ -1,3 +1,6 @@
//go:build !scanner
// +build !scanner

package gost

import "testing"
@@ -36,10 +39,17 @@ func TestDebian_Supported(t *testing.T) {
			want: true,
		},
		{
			name: "11 is not supported yet",
			name: "11 is supported",
			args: args{
				major: "11",
			},
			want: true,
		},
		{
			name: "12 is not supported yet",
			args: args{
				major: "12",
			},
			want: false,
		},
		{

28 gost/gost.go
@@ -1,3 +1,4 @@
//go:build !scanner
// +build !scanner

package gost
@@ -6,7 +7,7 @@ import (
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/knqyf263/gost/db"
	"github.com/vulsio/gost/db"
	"golang.org/x/xerrors"

	"github.com/future-architect/vuls/constant"
@@ -20,7 +21,8 @@ type DBDriver struct {

// Client is the interface of OVAL client.
type Client interface {
	DetectUnfixed(*models.ScanResult, bool) (int, error)
	DetectCVEs(*models.ScanResult, bool) (int, error)
	CloseDB() error
}

// Base is a base struct
@@ -28,6 +30,14 @@ type Base struct {
	DBDriver DBDriver
}

// CloseDB close a DB connection
func (b Base) CloseDB() error {
	if b.DBDriver.DB == nil {
		return nil
	}
	return b.DBDriver.DB.CloseDB()
}

// FillCVEsWithRedHat fills CVE detailed with Red Hat Security
func FillCVEsWithRedHat(r *models.ScanResult, cnf config.GostConf) error {
	db, locked, err := newGostDB(cnf)
@@ -37,8 +47,10 @@ func FillCVEsWithRedHat(r *models.ScanResult, cnf config.GostConf) error {
		return err
	}
	defer func() {
		if err := db.CloseDB(); err != nil {
			logging.Log.Errorf("Failed to close DB. err: %+v", err)
		if db != nil {
			if err := db.CloseDB(); err != nil {
				logging.Log.Errorf("Failed to close DB. err: %+v", err)
			}
		}
	}()
	return RedHat{Base{DBDriver{DB: db, Cnf: &cnf}}}.fillCvesWithRedHatAPI(r)
@@ -56,14 +68,16 @@ func NewClient(cnf config.GostConf, family string) (Client, error) {
	driver := DBDriver{DB: db, Cnf: &cnf}

	switch family {
	case constant.RedHat, constant.CentOS:
	case constant.RedHat, constant.CentOS, constant.Rocky, constant.Alma:
		return RedHat{Base{DBDriver: driver}}, nil
	case constant.Debian, constant.Raspbian:
		return Debian{Base{DBDriver: driver}}, nil
	case constant.Ubuntu:
		return Ubuntu{Base{DBDriver: driver}}, nil
	case constant.Windows:
		return Microsoft{Base{DBDriver: driver}}, nil
	default:
		return Pseudo{}, nil
		return Pseudo{Base{DBDriver: driver}}, nil
	}
}

@@ -76,7 +90,7 @@ func newGostDB(cnf config.GostConf) (driver db.DB, locked bool, err error) {
	if cnf.GetType() == "sqlite3" {
		path = cnf.GetSQLite3Path()
	}
	if driver, locked, err = db.NewDB(cnf.GetType(), path, cnf.GetDebugSQL()); err != nil {
	if driver, locked, err = db.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), db.Option{}); err != nil {
		if locked {
			return nil, true, xerrors.Errorf("gostDB is locked. err: %w", err)
		}

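The `Client` interface is renamed from `DetectUnfixed` to `DetectCVEs`, `NewClient` now also covers Rocky and Alma, and `Base.CloseDB` is guarded against a nil DB handle. A hedged usage sketch of the updated interface; the chosen family and server name are placeholders, not taken from this diff:

```go
package example

import (
	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/gost"
	"github.com/future-architect/vuls/models"
)

// detectWithGost shows how a caller would use the renamed interface: every
// family-specific client (RedHat, Debian, Ubuntu, Microsoft, Pseudo) now
// satisfies Client via DetectCVEs instead of DetectUnfixed.
func detectWithGost(cnf config.GostConf, r *models.ScanResult) (int, error) {
	client, err := gost.NewClient(cnf, constant.Ubuntu)
	if err != nil {
		return 0, err
	}
	defer func() {
		// Safe even when no DB handle was opened (fetch-via-HTTP mode),
		// thanks to the nil guard added in Base.CloseDB.
		_ = client.CloseDB()
	}()
	return client.DetectCVEs(r, false)
}
```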
||||
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
@@ -5,7 +8,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
func TestSetPackageStates(t *testing.T) {
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// Microsoft is Gost client for windows
|
||||
@@ -14,8 +16,8 @@ type Microsoft struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (ms Microsoft) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
// DetectCVEs fills cve information that has in Gost
|
||||
func (ms Microsoft) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
|
||||
if ms.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
@@ -23,7 +25,11 @@ func (ms Microsoft) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err
|
||||
for cveID := range r.ScannedCves {
|
||||
cveIDs = append(cveIDs, cveID)
|
||||
}
|
||||
for cveID, msCve := range ms.DBDriver.DB.GetMicrosoftMulti(cveIDs) {
|
||||
msCves, err := ms.DBDriver.DB.GetMicrosoftMulti(cveIDs)
|
||||
if err != nil {
|
||||
return 0, nil
|
||||
}
|
||||
for cveID, msCve := range msCves {
|
||||
if _, ok := r.ScannedCves[cveID]; !ok {
|
||||
continue
|
||||
}
|
||||
@@ -32,7 +38,7 @@ func (ms Microsoft) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.CveContents{}
|
||||
}
|
||||
v.CveContents[models.Microsoft] = *cveCont
|
||||
v.CveContents[models.Microsoft] = []models.CveContent{*cveCont}
|
||||
v.Mitigations = append(v.Mitigations, mitigations...)
|
||||
r.ScannedCves[cveID] = v
|
||||
}
|
||||
@@ -41,6 +47,9 @@ func (ms Microsoft) DetectUnfixed(r *models.ScanResult, _ bool) (nCVEs int, err
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (ms Microsoft) ConvertToModel(cve *gostmodels.MicrosoftCVE) (*models.CveContent, []models.Mitigation) {
|
||||
sort.Slice(cve.ScoreSets, func(i, j int) bool {
|
||||
return cve.ScoreSets[i].Vector < cve.ScoreSets[j].Vector
|
||||
})
|
||||
v3score := 0.0
|
||||
var v3Vector string
|
||||
for _, scoreSet := range cve.ScoreSets {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
@@ -11,7 +12,7 @@ type Pseudo struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (pse Pseudo) DetectUnfixed(r *models.ScanResult, _ bool) (int, error) {
|
||||
// DetectCVEs fills cve information that has in Gost
|
||||
func (pse Pseudo) DetectCVEs(_ *models.ScanResult, _ bool) (int, error) {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
@@ -8,9 +9,10 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
gostmodels "github.com/knqyf263/gost/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
// RedHat is Gost client for RedHat family linux
|
||||
@@ -18,10 +20,14 @@ type RedHat struct {
|
||||
Base
|
||||
}
|
||||
|
||||
// DetectUnfixed fills cve information that has in Gost
|
||||
func (red RedHat) DetectUnfixed(r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
// DetectCVEs fills cve information that has in Gost
|
||||
func (red RedHat) DetectCVEs(r *models.ScanResult, ignoreWillNotFix bool) (nCVEs int, err error) {
|
||||
gostRelease := r.Release
|
||||
if r.Family == constant.CentOS {
|
||||
gostRelease = strings.TrimPrefix(r.Release, "stream")
|
||||
}
|
||||
if red.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
prefix, _ := util.URLPathJoin(red.DBDriver.Cnf.GetURL(), "redhat", major(r.Release), "pkgs")
|
||||
prefix, _ := util.URLPathJoin(red.DBDriver.Cnf.GetURL(), "redhat", major(gostRelease), "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, prefix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
@@ -44,7 +50,10 @@ func (red RedHat) DetectUnfixed(r *models.ScanResult, ignoreWillNotFix bool) (nC
|
||||
}
|
||||
for _, pack := range r.Packages {
|
||||
// CVE-ID: RedhatCVE
|
||||
cves := red.DBDriver.DB.GetUnfixedCvesRedhat(major(r.Release), pack.Name, ignoreWillNotFix)
|
||||
cves, err := red.DBDriver.DB.GetUnfixedCvesRedhat(major(gostRelease), pack.Name, ignoreWillNotFix)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
for _, cve := range cves {
|
||||
if newly := red.setUnfixedCveToScanResult(&cve, r); newly {
|
||||
nCVEs++
|
||||
@@ -84,7 +93,11 @@ func (red RedHat) fillCvesWithRedHatAPI(r *models.ScanResult) error {
|
||||
if red.DBDriver.DB == nil {
|
||||
return nil
|
||||
}
|
||||
for _, redCve := range red.DBDriver.DB.GetRedhatMulti(cveIDs) {
|
||||
redCves, err := red.DBDriver.DB.GetRedhatMulti(cveIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, redCve := range redCves {
|
||||
if len(redCve.Name) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -102,7 +115,7 @@ func (red RedHat) setFixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models.S
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
@@ -122,7 +135,7 @@ func (red RedHat) setUnfixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(*cveCont)
|
||||
} else {
|
||||
v.CveContents[models.RedHatAPI] = *cveCont
|
||||
v.CveContents[models.RedHatAPI] = []models.CveContent{*cveCont}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
@@ -133,8 +146,12 @@ func (red RedHat) setUnfixedCveToScanResult(cve *gostmodels.RedhatCVE, r *models
|
||||
newly = true
|
||||
}
|
||||
v.Mitigations = append(v.Mitigations, mitigations...)
|
||||
pkgStats := red.mergePackageStates(v,
|
||||
cve.PackageState, r.Packages, r.Release)
|
||||
|
||||
gostRelease := r.Release
|
||||
if r.Family == constant.CentOS {
|
||||
gostRelease = strings.TrimPrefix(r.Release, "stream")
|
||||
}
|
||||
pkgStats := red.mergePackageStates(v, cve.PackageState, r.Packages, gostRelease)
|
||||
if 0 < len(pkgStats) {
|
||||
v.AffectedPackages = pkgStats
|
||||
r.ScannedCves[cve.Name] = v
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package gost
|
||||
|
||||
import (
|
||||
|
197 gost/ubuntu.go (new file)
@@ -0,0 +1,197 @@
//go:build !scanner
// +build !scanner

package gost

import (
	"encoding/json"
	"strings"

	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/util"
	gostmodels "github.com/vulsio/gost/models"
)

// Ubuntu is Gost client for Ubuntu
type Ubuntu struct {
	Base
}

func (ubu Ubuntu) supported(version string) bool {
	_, ok := map[string]string{
		"1404": "trusty",
		"1604": "xenial",
		"1804": "bionic",
		"2004": "focal",
		"2010": "groovy",
		"2104": "hirsute",
	}[version]
	return ok
}

// DetectCVEs fills cve information that has in Gost
func (ubu Ubuntu) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
	ubuReleaseVer := strings.Replace(r.Release, ".", "", 1)
	if !ubu.supported(ubuReleaseVer) {
		logging.Log.Warnf("Ubuntu %s is not supported yet", r.Release)
		return 0, nil
	}
||||
|
||||
linuxImage := "linux-image-" + r.RunningKernel.Release
|
||||
// Add linux and set the version of running kernel to search Gost.
|
||||
if r.Container.ContainerID == "" {
|
||||
newVer := ""
|
||||
if p, ok := r.Packages[linuxImage]; ok {
|
||||
newVer = p.NewVersion
|
||||
}
|
||||
r.Packages["linux"] = models.Package{
|
||||
Name: "linux",
|
||||
Version: r.RunningKernel.Version,
|
||||
NewVersion: newVer,
|
||||
}
|
||||
}
|
||||
|
||||
packCvesList := []packCves{}
|
||||
if ubu.DBDriver.Cnf.IsFetchViaHTTP() {
|
||||
url, _ := util.URLPathJoin(ubu.DBDriver.Cnf.GetURL(), "ubuntu", ubuReleaseVer, "pkgs")
|
||||
responses, err := getAllUnfixedCvesViaHTTP(r, url)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
for _, res := range responses {
|
||||
ubuCves := map[string]gostmodels.UbuntuCVE{}
|
||||
if err := json.Unmarshal([]byte(res.json), &ubuCves); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: res.request.packName,
|
||||
isSrcPack: res.request.isSrcPack,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if ubu.DBDriver.DB == nil {
|
||||
return 0, nil
|
||||
}
|
||||
for _, pack := range r.Packages {
|
||||
ubuCves, err := ubu.DBDriver.DB.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
|
||||
if err != nil {
|
||||
return 0, nil
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: false,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
|
||||
// SrcPack
|
||||
for _, pack := range r.SrcPackages {
|
||||
ubuCves, err := ubu.DBDriver.DB.GetUnfixedCvesUbuntu(ubuReleaseVer, pack.Name)
|
||||
if err != nil {
|
||||
return 0, nil
|
||||
}
|
||||
cves := []models.CveContent{}
|
||||
for _, ubucve := range ubuCves {
|
||||
cves = append(cves, *ubu.ConvertToModel(&ubucve))
|
||||
}
|
||||
packCvesList = append(packCvesList, packCves{
|
||||
packName: pack.Name,
|
||||
isSrcPack: true,
|
||||
cves: cves,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
delete(r.Packages, "linux")
|
||||
|
||||
for _, p := range packCvesList {
|
||||
for _, cve := range p.cves {
|
||||
v, ok := r.ScannedCves[cve.CveID]
|
||||
if ok {
|
||||
if v.CveContents == nil {
|
||||
v.CveContents = models.NewCveContents(cve)
|
||||
} else {
|
||||
v.CveContents[models.UbuntuAPI] = []models.CveContent{cve}
|
||||
}
|
||||
} else {
|
||||
v = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
CveContents: models.NewCveContents(cve),
|
||||
Confidences: models.Confidences{models.UbuntuAPIMatch},
|
||||
}
|
||||
nCVEs++
|
||||
}
|
||||
|
||||
names := []string{}
|
||||
if p.isSrcPack {
|
||||
if srcPack, ok := r.SrcPackages[p.packName]; ok {
|
||||
for _, binName := range srcPack.BinaryNames {
|
||||
if _, ok := r.Packages[binName]; ok {
|
||||
names = append(names, binName)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if p.packName == "linux" {
|
||||
names = append(names, linuxImage)
|
||||
} else {
|
||||
names = append(names, p.packName)
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
|
||||
Name: name,
|
||||
FixState: "open",
|
||||
NotFixedYet: true,
|
||||
})
|
||||
}
|
||||
r.ScannedCves[cve.CveID] = v
|
||||
}
|
||||
}
|
||||
return nCVEs, nil
|
||||
}
|
||||
|
||||
// ConvertToModel converts gost model to vuls model
|
||||
func (ubu Ubuntu) ConvertToModel(cve *gostmodels.UbuntuCVE) *models.CveContent {
|
||||
references := []models.Reference{}
|
||||
for _, r := range cve.References {
|
||||
if strings.Contains(r.Reference, "https://cve.mitre.org/cgi-bin/cvename.cgi?name=") {
|
||||
references = append(references, models.Reference{Source: "CVE", Link: r.Reference})
|
||||
} else {
|
||||
references = append(references, models.Reference{Link: r.Reference})
|
||||
}
|
||||
}
|
||||
|
||||
for _, b := range cve.Bugs {
|
||||
references = append(references, models.Reference{Source: "Bug", Link: b.Bug})
|
||||
}
|
||||
|
||||
for _, u := range cve.Upstreams {
|
||||
for _, upstreamLink := range u.UpstreamLinks {
|
||||
references = append(references, models.Reference{Source: "UPSTREAM", Link: upstreamLink.Link})
|
||||
}
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
Type: models.UbuntuAPI,
|
||||
CveID: cve.Candidate,
|
||||
Summary: cve.Description,
|
||||
Cvss2Severity: cve.Priority,
|
||||
Cvss3Severity: cve.Priority,
|
||||
SourceLink: "https://ubuntu.com/security/" + cve.Candidate,
|
||||
References: references,
|
||||
Published: cve.PublicDate,
|
||||
}
|
||||
}
|
||||
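The new Ubuntu client keys its support table on the release string with the dot stripped ("20.04" becomes "2004") and builds the source link from the CVE candidate ID, as `Ubuntu.ConvertToModel` above shows. A small sketch of that normalization and link construction; the release values here are illustrative:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, release := range []string{"18.04", "20.04", "21.04"} {
		// Same normalization as Ubuntu.DetectCVEs: drop only the first ".".
		fmt.Println(strings.Replace(release, ".", "", 1)) // 1804, 2004, 2104
	}
	// SourceLink is assembled the same way as in Ubuntu.ConvertToModel.
	candidate := "CVE-2021-3517"
	fmt.Println("https://ubuntu.com/security/" + candidate)
}
```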
137 gost/ubuntu_test.go (new file)
@@ -0,0 +1,137 @@
|
||||
package gost
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
gostmodels "github.com/vulsio/gost/models"
|
||||
)
|
||||
|
||||
func TestUbuntu_Supported(t *testing.T) {
|
||||
type args struct {
|
||||
ubuReleaseVer string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "14.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1404",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "16.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1604",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "18.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "1804",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "20.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2004",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "20.10 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2010",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "21.04 is supported",
|
||||
args: args{
|
||||
ubuReleaseVer: "2104",
|
||||
},
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "empty string is not supported yet",
|
||||
args: args{
|
||||
ubuReleaseVer: "",
|
||||
},
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ubu := Ubuntu{}
|
||||
if got := ubu.supported(tt.args.ubuReleaseVer); got != tt.want {
|
||||
t.Errorf("Ubuntu.Supported() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUbuntuConvertToModel(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input gostmodels.UbuntuCVE
|
||||
expected models.CveContent
|
||||
}{
|
||||
{
|
||||
name: "gost Ubuntu.ConvertToModel",
|
||||
input: gostmodels.UbuntuCVE{
|
||||
Candidate: "CVE-2021-3517",
|
||||
PublicDate: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
|
||||
References: []gostmodels.UbuntuReference{
|
||||
{Reference: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
|
||||
{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
|
||||
{Reference: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"}},
|
||||
Description: "description.",
|
||||
Notes: []gostmodels.UbuntuNote{},
|
||||
Bugs: []gostmodels.UbuntuBug{{Bug: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"}},
|
||||
Priority: "medium",
|
||||
Patches: []gostmodels.UbuntuPatch{
|
||||
{PackageName: "libxml2", ReleasePatches: []gostmodels.UbuntuReleasePatch{
|
||||
{ReleaseName: "focal", Status: "needed", Note: ""},
|
||||
}},
|
||||
},
|
||||
Upstreams: []gostmodels.UbuntuUpstream{{
|
||||
PackageName: "libxml2", UpstreamLinks: []gostmodels.UbuntuUpstreamLink{
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
expected: models.CveContent{
|
||||
Type: models.UbuntuAPI,
|
||||
CveID: "CVE-2021-3517",
|
||||
Summary: "description.",
|
||||
Cvss2Severity: "medium",
|
||||
Cvss3Severity: "medium",
|
||||
SourceLink: "https://ubuntu.com/security/CVE-2021-3517",
|
||||
References: []models.Reference{
|
||||
{Source: "CVE", Link: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-3517"},
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/issues/235"},
|
||||
{Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/bf22713507fe1fc3a2c4b525cf0a88c2dc87a3a2"},
|
||||
{Source: "Bug", Link: "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=987738"},
|
||||
{Source: "UPSTREAM", Link: "https://gitlab.gnome.org/GNOME/libxml2/-/commit/50f06b3efb638efb0abd95dc62dca05ae67882c2"}},
|
||||
Published: time.Date(2021, 5, 19, 14, 15, 0, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ubu := Ubuntu{}
|
||||
got := ubu.ConvertToModel(&tt.input)
|
||||
if !reflect.DeepEqual(got, &tt.expected) {
|
||||
t.Errorf("Ubuntu.ConvertToModel() = %#v, want %#v", got, &tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -1,3 +1,6 @@
//go:build !scanner
// +build !scanner

package gost

import (
@@ -83,7 +86,10 @@ type request struct {

func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
	responses []response, err error) {
	return getCvesWithFixStateViaHTTP(r, urlPrefix, "unfixed-cves")
}

func getCvesWithFixStateViaHTTP(r *models.ScanResult, urlPrefix, fixState string) (responses []response, err error) {
	nReq := len(r.Packages) + len(r.SrcPackages)
	reqChan := make(chan request, nReq)
	resChan := make(chan response, nReq)
@@ -118,7 +124,7 @@ func getAllUnfixedCvesViaHTTP(r *models.ScanResult, urlPrefix string) (
	url, err := util.URLPathJoin(
		urlPrefix,
		req.packName,
		"unfixed-cves",
		fixState,
	)
	if err != nil {
		errChan <- err

BIN img/sponsor/tines.png (new binary file, 34 KiB; not shown)
Submodule integration added at 75327e7431
(Several large file diffs suppressed and not shown here.)
@@ -1,131 +0,0 @@
|
||||
{
|
||||
"jsonVersion": 4,
|
||||
"lang": "",
|
||||
"serverUUID": "",
|
||||
"serverName": "rails",
|
||||
"family": "pseudo",
|
||||
"release": "",
|
||||
"container": {
|
||||
"containerID": "",
|
||||
"name": "",
|
||||
"image": "",
|
||||
"type": "",
|
||||
"uuid": ""
|
||||
},
|
||||
"platform": {
|
||||
"name": "other",
|
||||
"instanceID": ""
|
||||
},
|
||||
"scannedAt": "2021-03-31T12:22:26.428630183+09:00",
|
||||
"scanMode": "fast mode",
|
||||
"scannedVersion": "v0.15.9",
|
||||
"scannedRevision": "build-20210331_121257_1a58c94",
|
||||
"scannedBy": "dev",
|
||||
"scannedVia": "pseudo",
|
||||
"scannedIpv4Addrs": [
|
||||
"172.19.0.1",
|
||||
"172.17.0.1",
|
||||
"172.27.0.1"
|
||||
],
|
||||
"reportedAt": "0001-01-01T00:00:00Z",
|
||||
"reportedVersion": "",
|
||||
"reportedRevision": "",
|
||||
"reportedBy": "",
|
||||
"errors": [],
|
||||
"warnings": [],
|
||||
"scannedCves": {},
|
||||
"runningKernel": {
|
||||
"release": "",
|
||||
"version": "",
|
||||
"rebootRequired": false
|
||||
},
|
||||
"packages": {},
|
||||
"config": {
|
||||
"scan": {
|
||||
"logDir": "/var/log/vuls",
|
||||
"resultsDir": "/home/ubuntu/go/src/github.com/future-architect/vuls/results",
|
||||
"default": {
|
||||
"port": "22",
|
||||
"scanMode": [
|
||||
"fast"
|
||||
]
|
||||
},
|
||||
"servers": {
|
||||
"rails": {
|
||||
"serverName": "rails",
|
||||
"cpeNames": [
|
||||
"cpe:/a:rubyonrails:ruby_on_rails:3.0.1"
|
||||
],
|
||||
"scanMode": [
|
||||
"fast"
|
||||
],
|
||||
"type": "pseudo",
|
||||
"wordpress": {}
|
||||
}
|
||||
},
|
||||
"cveDict": {
|
||||
"Name": "cveDict",
|
||||
"Type": "sqlite3",
|
||||
"SQLite3Path": "/home/ubuntu/go/src/github.com/kotakanbe/go-cve-dictionary/cve.sqlite3",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"ovalDict": {
|
||||
"Name": "ovalDict",
|
||||
"Type": "sqlite3",
|
||||
"SQLite3Path": "/home/ubuntu/go/src/github.com/kotakanbe/goval-dictionary/oval.sqlite3",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"gost": {
|
||||
"Name": "gost",
|
||||
"Type": "sqlite3",
|
||||
"SQLite3Path": "/home/ubuntu/go/src/github.com/future-architect/vuls/gost.sqlite3",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"exploit": {
|
||||
"Name": "exploit",
|
||||
"Type": "sqlite3",
|
||||
"SQLite3Path": "/home/ubuntu/go/src/github.com/vulsio/go-exploitdb/go-exploitdb.sqlite3",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"metasploit": {
|
||||
"Name": "metasploit",
|
||||
"Type": "sqlite3",
|
||||
"SQLite3Path": "/home/ubuntu/go/src/github.com/takuzoo3868/go-msfdb/go-msfdb.sqlite3",
|
||||
"DebugSQL": false
|
||||
}
|
||||
},
|
||||
"report": {
|
||||
"default": {},
|
||||
"cveDict": {
|
||||
"Name": "",
|
||||
"Type": "",
|
||||
"SQLite3Path": "",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"ovalDict": {
|
||||
"Name": "",
|
||||
"Type": "",
|
||||
"SQLite3Path": "",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"gost": {
|
||||
"Name": "",
|
||||
"Type": "",
|
||||
"SQLite3Path": "",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"exploit": {
|
||||
"Name": "",
|
||||
"Type": "",
|
||||
"SQLite3Path": "",
|
||||
"DebugSQL": false
|
||||
},
|
||||
"metasploit": {
|
||||
"Name": "",
|
||||
"Type": "",
|
||||
"SQLite3Path": "",
|
||||
"DebugSQL": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
(Several more large file diffs suppressed and not shown here.)
@@ -1,27 +0,0 @@
|
||||
[cveDict]
|
||||
Type = "sqlite3"
|
||||
SQLite3Path = "/home/ubuntu/vulsctl/docker/cve.sqlite3"
|
||||
|
||||
[ovalDict]
|
||||
Type = "sqlite3"
|
||||
SQLite3Path = "/home/ubuntu/vulsctl/docker/oval.sqlite3"
|
||||
|
||||
[gost]
|
||||
Type = "sqlite3"
|
||||
SQLite3Path = "/home/ubuntu/vulsctl/docker/gost.sqlite3"
|
||||
|
||||
[exploit]
|
||||
Type = "sqlite3"
|
||||
SQLite3Path = "/home/ubuntu/vulsctl/docker/go-exploitdb.sqlite3"
|
||||
|
||||
[metasploit]
|
||||
type = "sqlite3"
|
||||
SQLite3Path = "/home/ubuntu/vulsctl/docker/go-msfdb.sqlite3"
|
||||
|
||||
[default]
|
||||
|
||||
[servers]
|
||||
|
||||
[servers.rails]
|
||||
type = "pseudo"
|
||||
cpeNames = [ "cpe:/a:rubyonrails:ruby_on_rails:3.0.1" ]
|
||||
@@ -1,27 +0,0 @@
|
||||
[cveDict]
|
||||
Type = "redis"
|
||||
Url = "redis://127.0.0.1/3"
|
||||
|
||||
[ovalDict]
|
||||
Type = "redis"
|
||||
Url = "redis://127.0.0.1/1"
|
||||
|
||||
[gost]
|
||||
Type = "redis"
|
||||
Url = "redis://127.0.0.1/2"
|
||||
|
||||
[exploit]
|
||||
Type = "redis"
|
||||
Url = "redis://127.0.0.1/4"
|
||||
|
||||
[metasploit]
|
||||
Type = "redis"
|
||||
Url = "redis://127.0.0.1/5"
|
||||
|
||||
[default]
|
||||
|
||||
[servers]
|
||||
|
||||
[servers.rails]
|
||||
type = "pseudo"
|
||||
cpeNames = [ "cpe:/a:rubyonrails:ruby_on_rails:3.0.1" ]
|
@@ -3,6 +3,7 @@ package logging
import (
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
@@ -17,10 +18,12 @@ import (

//LogOpts has options for logging
type LogOpts struct {
	Debug     bool   `json:"debug,omitempty"`
	DebugSQL  bool   `json:"debugSQL,omitempty"`
	LogDir    string `json:"logDir,omitempty"`
	Quiet     bool   `json:"quiet,omitempty"`
	Debug     bool   `json:"debug,omitempty"`
	DebugSQL  bool   `json:"debugSQL,omitempty"`
	LogToFile bool   `json:"logToFile,omitempty"`
	LogDir    string `json:"logDir,omitempty"`
	LogJSON   bool   `json:"logJSON"`
	Quiet     bool   `json:"quiet,omitempty"`
}

// Log for localhost
@@ -44,7 +47,7 @@ func NewNormalLogger() Logger {
}

// NewCustomLogger creates logrus
func NewCustomLogger(debug, quiet bool, logDir, logMsgAnsiColor, serverName string) Logger {
func NewCustomLogger(debug, quiet, logToFile bool, logDir, logMsgAnsiColor, serverName string) Logger {
	log := logrus.New()
	log.Formatter = &formatter.TextFormatter{MsgAnsiColor: logMsgAnsiColor}
	log.Level = logrus.InfoLevel
@@ -57,14 +60,17 @@ func NewCustomLogger(debug, quiet bool, logDir, logMsgAnsiColor, serverName stri
		return Logger{Entry: *logrus.NewEntry(log)}
	}

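`NewCustomLogger` gains an explicit `logToFile` flag, so file output is no longer tied to quiet mode. A hedged usage sketch of the new signature; the log directory and server name are placeholders:

```go
package example

import "github.com/future-architect/vuls/logging"

func newServerLogger() logging.Logger {
	// debug=false, quiet=false, logToFile=true: per the hunks that follow,
	// this logs to stderr and also to <logDir>/<serverName>.log.
	return logging.NewCustomLogger(false, false, true, "/var/log/vuls", "", "web01")
}
```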
||||
// File output
|
||||
dir := GetDefaultLogDir()
|
||||
if logDir != "" {
|
||||
dir = logDir
|
||||
whereami := "localhost"
|
||||
if serverName != "" {
|
||||
whereami = serverName
|
||||
}
|
||||
|
||||
// Only log to a file if quiet mode enabled
|
||||
if quiet && flag.Lookup("test.v") == nil {
|
||||
if logToFile {
|
||||
dir := GetDefaultLogDir()
|
||||
if logDir != "" {
|
||||
dir = logDir
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); os.IsNotExist(err) {
|
||||
if err := os.Mkdir(dir, 0700); err != nil {
|
||||
log.Errorf("Failed to create log directory. path: %s, err: %+v", dir, err)
|
||||
@@ -73,36 +79,33 @@ func NewCustomLogger(debug, quiet bool, logDir, logMsgAnsiColor, serverName stri
|
||||
|
||||
logFile := dir + "/vuls.log"
|
||||
if file, err := os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644); err == nil {
|
||||
log.Out = file
|
||||
log.Out = io.MultiWriter(os.Stderr, file)
|
||||
} else {
|
||||
log.Out = os.Stderr
|
||||
log.Errorf("Failed to create log file. path: %s, err: %+v", logFile, err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); err == nil {
|
||||
path := filepath.Join(dir, fmt.Sprintf("%s.log", whereami))
|
||||
if _, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644); err == nil {
|
||||
log.Hooks.Add(lfshook.NewHook(lfshook.PathMap{
|
||||
logrus.DebugLevel: path,
|
||||
logrus.InfoLevel: path,
|
||||
logrus.WarnLevel: path,
|
||||
logrus.ErrorLevel: path,
|
||||
logrus.FatalLevel: path,
|
||||
logrus.PanicLevel: path,
|
||||
}, nil))
|
||||
} else {
|
||||
log.Errorf("Failed to create log file. path: %s, err: %+v", path, err)
|
||||
}
|
||||
}
|
||||
} else if quiet {
|
||||
log.Out = ioutil.Discard
|
||||
} else {
|
||||
log.Out = os.Stderr
|
||||
}
|
||||
|
||||
whereami := "localhost"
|
||||
if 0 < len(serverName) {
|
||||
whereami = serverName
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); err == nil {
|
||||
path := filepath.Join(dir, fmt.Sprintf("%s.log", whereami))
|
||||
if _, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644); err == nil {
|
||||
log.Hooks.Add(lfshook.NewHook(lfshook.PathMap{
|
||||
logrus.DebugLevel: path,
|
||||
logrus.InfoLevel: path,
|
||||
logrus.WarnLevel: path,
|
||||
logrus.ErrorLevel: path,
|
||||
logrus.FatalLevel: path,
|
||||
logrus.PanicLevel: path,
|
||||
}, nil))
|
||||
} else {
|
||||
log.Errorf("Failed to create log file. path: %s, err: %+v", path, err)
|
||||
}
|
||||
}
|
||||
|
||||
entry := log.WithFields(logrus.Fields{"prefix": whereami})
|
||||
return Logger{Entry: *entry}
|
||||
}
|
||||
|
@@ -1,20 +1,32 @@
package models

import (
	"sort"
	"strings"
	"time"

	"github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability"
)

// CveContents has CveContent
type CveContents map[CveContentType]CveContent
type CveContents map[CveContentType][]CveContent

// NewCveContents create CveContents
func NewCveContents(conts ...CveContent) CveContents {
	m := CveContents{}
	for _, cont := range conts {
		m[cont.Type] = cont
		if cont.Type == Jvn {
			found := false
			for _, cveCont := range m[cont.Type] {
				if cont.SourceLink == cveCont.SourceLink {
					found = true
					break
				}
			}
			if !found {
				m[cont.Type] = append(m[cont.Type], cont)
			}
		} else {
			m[cont.Type] = []CveContent{cont}
		}
	}
	return m
}
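This is the central model change of the diff: `CveContents` becomes a map from content type to a slice of `CveContent`, so several JVN entries (or other sources) can coexist under one type, deduplicated by `SourceLink`. A sketch of how callers now read it, using simplified stand-in types; the sample values are made up:

```go
package main

import "fmt"

// Simplified stand-ins for models.CveContentType / models.CveContent,
// just to illustrate the new map-of-slices shape.
type CveContentType string
type CveContent struct {
	Type       CveContentType
	SourceLink string
}
type CveContents map[CveContentType][]CveContent

func main() {
	contents := CveContents{
		"jvn": {
			{Type: "jvn", SourceLink: "https://jvn.jp/vu/JVNVU00000001/"},
			{Type: "jvn", SourceLink: "https://jvn.jp/vu/JVNVU00000002/"},
		},
		"nvd": {
			{Type: "nvd", SourceLink: "https://nvd.nist.gov/vuln/detail/CVE-2021-0000"},
		},
	}
	// Every lookup now iterates the slice instead of reading a single value,
	// mirroring the "for _, cont := range conts" loops added throughout this diff.
	for ctype, conts := range contents {
		for _, cont := range conts {
			fmt.Println(ctype, cont.SourceLink)
		}
	}
}
```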
||||
@@ -44,16 +56,18 @@ func (v CveContents) Except(exceptCtypes ...CveContentType) (values CveContents)
|
||||
}
|
||||
|
||||
// PrimarySrcURLs returns link of source
|
||||
func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveContentStr) {
|
||||
func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string, confidences Confidences) (values []CveContentStr) {
|
||||
if cveID == "" {
|
||||
return
|
||||
}
|
||||
|
||||
if cont, found := v[Nvd]; found {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Vendor Advisory" {
|
||||
values = append(values, CveContentStr{Nvd, r.Link})
|
||||
if conts, found := v[Nvd]; found {
|
||||
for _, cont := range conts {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Vendor Advisory" {
|
||||
values = append(values, CveContentStr{Nvd, r.Link})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -61,17 +75,31 @@ func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveC
|
||||
|
||||
order := CveContentTypes{Nvd, NewCveContentType(myFamily), GitHub}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found {
|
||||
if cont.SourceLink == "" {
|
||||
continue
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.SourceLink == "" {
|
||||
continue
|
||||
}
|
||||
values = append(values, CveContentStr{ctype, cont.SourceLink})
|
||||
}
|
||||
values = append(values, CveContentStr{ctype, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
|
||||
if lang == "ja" {
|
||||
if cont, found := v[Jvn]; found && 0 < len(cont.SourceLink) {
|
||||
values = append(values, CveContentStr{Jvn, cont.SourceLink})
|
||||
jvnMatch := false
|
||||
for _, confidence := range confidences {
|
||||
if confidence.DetectionMethod == JvnVendorProductMatchStr {
|
||||
jvnMatch = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if lang == "ja" || jvnMatch {
|
||||
if conts, found := v[Jvn]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.SourceLink) {
|
||||
values = append(values, CveContentStr{Jvn, cont.SourceLink})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,14 +114,17 @@ func (v CveContents) PrimarySrcURLs(lang, myFamily, cveID string) (values []CveC
|
||||
|
||||
// PatchURLs returns link of patch
|
||||
func (v CveContents) PatchURLs() (urls []string) {
|
||||
cont, found := v[Nvd]
|
||||
conts, found := v[Nvd]
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Patch" {
|
||||
urls = append(urls, r.Link)
|
||||
|
||||
for _, cont := range conts {
|
||||
for _, r := range cont.References {
|
||||
for _, t := range r.Tags {
|
||||
if t == "Patch" {
|
||||
urls = append(urls, r.Link)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,11 +161,15 @@ func (v CveContents) Cpes(myFamily string) (values []CveContentCpes) {
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.Cpes) {
|
||||
values = append(values, CveContentCpes{
|
||||
Type: ctype,
|
||||
Value: cont.Cpes,
|
||||
})
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.Cpes) {
|
||||
values = append(values, CveContentCpes{
|
||||
Type: ctype,
|
||||
Value: cont.Cpes,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
@@ -152,11 +187,15 @@ func (v CveContents) References(myFamily string) (values []CveContentRefs) {
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.References) {
|
||||
values = append(values, CveContentRefs{
|
||||
Type: ctype,
|
||||
Value: cont.References,
|
||||
})
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.References) {
|
||||
values = append(values, CveContentRefs{
|
||||
Type: ctype,
|
||||
Value: cont.References,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,17 +207,21 @@ func (v CveContents) CweIDs(myFamily string) (values []CveContentStr) {
|
||||
order := CveContentTypes{NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(order...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v[ctype]; found && 0 < len(cont.CweIDs) {
|
||||
for _, cweID := range cont.CweIDs {
|
||||
for _, val := range values {
|
||||
if val.Value == cweID {
|
||||
continue
|
||||
if conts, found := v[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if 0 < len(cont.CweIDs) {
|
||||
for _, cweID := range cont.CweIDs {
|
||||
for _, val := range values {
|
||||
if val.Value == cweID {
|
||||
continue
|
||||
}
|
||||
}
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cweID,
|
||||
})
|
||||
}
|
||||
}
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: cweID,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -197,6 +240,47 @@ func (v CveContents) UniqCweIDs(myFamily string) (values []CveContentStr) {
|
||||
return values
|
||||
}
|
||||
|
||||
// Sort elements for integration-testing
|
||||
func (v CveContents) Sort() {
|
||||
for contType, contents := range v {
|
||||
// CVSS3 desc, CVSS2 desc, SourceLink asc
|
||||
sort.Slice(contents, func(i, j int) bool {
|
||||
if contents[i].Cvss3Score > contents[j].Cvss3Score {
|
||||
return true
|
||||
} else if contents[i].Cvss3Score == contents[i].Cvss3Score {
|
||||
if contents[i].Cvss2Score > contents[j].Cvss2Score {
|
||||
return true
|
||||
} else if contents[i].Cvss2Score == contents[i].Cvss2Score {
|
||||
if contents[i].SourceLink < contents[j].SourceLink {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
v[contType] = contents
|
||||
}
|
||||
for contType, contents := range v {
|
||||
for cveID, cont := range contents {
|
||||
sort.Slice(cont.References, func(i, j int) bool {
|
||||
return cont.References[i].Link < cont.References[j].Link
|
||||
})
|
||||
sort.Slice(cont.CweIDs, func(i, j int) bool {
|
||||
return cont.CweIDs[i] < cont.CweIDs[j]
|
||||
})
|
||||
for i, ref := range cont.References {
|
||||
// sort v.CveContents[].References[].Tags
|
||||
sort.Slice(ref.Tags, func(j, k int) bool {
|
||||
return ref.Tags[j] < ref.Tags[k]
|
||||
})
|
||||
cont.References[i] = ref
|
||||
}
|
||||
contents[cveID] = cont
|
||||
}
|
||||
v[contType] = contents
|
||||
}
|
||||
}
|
||||
|
||||
// CveContent has abstraction of various vulnerability information
|
||||
type CveContent struct {
|
||||
Type CveContentType `json:"type"`
|
||||
@@ -233,18 +317,22 @@ func NewCveContentType(name string) CveContentType {
|
||||
return Nvd
|
||||
case "jvn":
|
||||
return Jvn
|
||||
case "redhat", "centos":
|
||||
case "redhat", "centos", "alma", "rocky":
|
||||
return RedHat
|
||||
case "fedora":
|
||||
return Fedora
|
||||
case "oracle":
|
||||
return Oracle
|
||||
case "ubuntu":
|
||||
return Ubuntu
|
||||
case "debian", vulnerability.DebianOVAL:
|
||||
case "debian", "debian-oval":
|
||||
return Debian
|
||||
case "redhat_api":
|
||||
return RedHatAPI
|
||||
case "debian_security_tracker":
|
||||
return DebianSecurityTracker
|
||||
case "ubuntu_api":
|
||||
return UbuntuAPI
|
||||
case "microsoft":
|
||||
return Microsoft
|
||||
case "wordpress":
|
||||
@@ -282,12 +370,18 @@ const (
|
||||
// Ubuntu is Ubuntu
|
||||
Ubuntu CveContentType = "ubuntu"
|
||||
|
||||
// UbuntuAPI is Ubuntu
|
||||
UbuntuAPI CveContentType = "ubuntu_api"
|
||||
|
||||
// Oracle is Oracle Linux
|
||||
Oracle CveContentType = "oracle"
|
||||
|
||||
// Amazon is Amazon Linux
|
||||
Amazon CveContentType = "amazon"
|
||||
|
||||
// Fedora is Fedora Linux
|
||||
Fedora CveContentType = "fedora"
|
||||
|
||||
// SUSE is SUSE Linux
|
||||
SUSE CveContentType = "suse"
|
||||
|
||||
@@ -317,10 +411,12 @@ var AllCveContetTypes = CveContentTypes{
|
||||
RedHat,
|
||||
RedHatAPI,
|
||||
Debian,
|
||||
Ubuntu,
|
||||
Amazon,
|
||||
SUSE,
|
||||
DebianSecurityTracker,
|
||||
Ubuntu,
|
||||
UbuntuAPI,
|
||||
Amazon,
|
||||
Fedora,
|
||||
SUSE,
|
||||
WpScan,
|
||||
Trivy,
|
||||
GitHub,
|
||||
|
||||
@@ -11,12 +11,12 @@ func TestExcept(t *testing.T) {
|
||||
out CveContents
|
||||
}{{
|
||||
in: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
Ubuntu: {Type: Ubuntu},
|
||||
Debian: {Type: Debian},
|
||||
RedHat: []CveContent{{Type: RedHat}},
|
||||
Ubuntu: []CveContent{{Type: Ubuntu}},
|
||||
Debian: []CveContent{{Type: Debian}},
|
||||
},
|
||||
out: CveContents{
|
||||
RedHat: {Type: RedHat},
|
||||
RedHat: []CveContent{{Type: RedHat}},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -30,9 +30,10 @@ func TestExcept(t *testing.T) {
|
||||
|
||||
func TestSourceLinks(t *testing.T) {
|
||||
type in struct {
|
||||
lang string
|
||||
cveID string
|
||||
cont CveContents
|
||||
lang string
|
||||
cveID string
|
||||
cont CveContents
|
||||
confidences Confidences
|
||||
}
|
||||
var tests = []struct {
|
||||
in in
|
||||
@@ -44,15 +45,15 @@ func TestSourceLinks(t *testing.T) {
|
||||
lang: "ja",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
References: []Reference{
|
||||
{
|
||||
@@ -69,7 +70,7 @@ func TestSourceLinks(t *testing.T) {
|
||||
},
|
||||
},
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/CVE-2017-6074",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
@@ -97,14 +98,14 @@ func TestSourceLinks(t *testing.T) {
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
SourceLink: "https://access.redhat.com/security/cve/CVE-2017-6074",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
@@ -128,11 +129,123 @@ func TestSourceLinks(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
// Confidence: JvnVendorProductMatch
|
||||
{
|
||||
in: in{
|
||||
lang: "en",
|
||||
cveID: "CVE-2017-6074",
|
||||
cont: CveContents{
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
SourceLink: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
}},
|
||||
},
|
||||
confidences: Confidences{
|
||||
Confidence{DetectionMethod: JvnVendorProductMatchStr},
|
||||
},
|
||||
},
|
||||
out: []CveContentStr{
|
||||
{
|
||||
Type: Jvn,
|
||||
Value: "https://jvn.jp/vu/JVNVU93610402/",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for i, tt := range tests {
|
||||
actual := tt.in.cont.PrimarySrcURLs(tt.in.lang, "redhat", tt.in.cveID)
|
||||
actual := tt.in.cont.PrimarySrcURLs(tt.in.lang, "redhat", tt.in.cveID, tt.in.confidences)
|
||||
if !reflect.DeepEqual(tt.out, actual) {
|
||||
t.Errorf("\n[%d] expected: %v\n actual: %v\n", i, tt.out, actual)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCveContents_Sort(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
v CveContents
|
||||
want CveContents
|
||||
}{
|
||||
{
|
||||
name: "sorted",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{Cvss3Score: 3},
|
||||
{Cvss3Score: 10},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{Cvss3Score: 10},
|
||||
{Cvss3Score: 3},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2, sourceLink",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2",
|
||||
v: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
want: map[CveContentType][]CveContent{
|
||||
"jvn": {
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
tt.v.Sort()
|
||||
if !reflect.DeepEqual(tt.v, tt.want) {
|
||||
t.Errorf("\n[%s] expected: %v\n actual: %v\n", tt.name, tt.want, tt.v)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,19 +10,18 @@ import (
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"golang.org/x/xerrors"
|
||||
// "github.com/aquasecurity/go-dep-parser/pkg/types"
|
||||
)
|
||||
|
||||
// LibraryScanners is an array of LibraryScanner
|
||||
type LibraryScanners []LibraryScanner
|
||||
|
||||
// Find : find by name
|
||||
func (lss LibraryScanners) Find(path, name string) map[string]types.Library {
|
||||
filtered := map[string]types.Library{}
|
||||
func (lss LibraryScanners) Find(path, name string) map[string]Library {
|
||||
filtered := map[string]Library{}
|
||||
for _, ls := range lss {
|
||||
for _, lib := range ls.Libs {
|
||||
if ls.Path == path && lib.Name == name {
|
||||
filtered[ls.Path] = lib
|
||||
if ls.LockfilePath == path && lib.Name == name {
|
||||
filtered[ls.LockfilePath] = lib
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -40,19 +39,32 @@ func (lss LibraryScanners) Total() (total int) {
|
||||
|
||||
// LibraryScanner has libraries information
|
||||
type LibraryScanner struct {
|
||||
Path string
|
||||
Libs []types.Library
|
||||
Type string
|
||||
Libs []Library
|
||||
|
||||
// The path to the Lockfile is stored.
|
||||
LockfilePath string `json:"path,omitempty"`
|
||||
}
|
||||
|
||||
// Library holds the attribute of a package library
|
||||
type Library struct {
|
||||
Name string
|
||||
Version string
|
||||
|
||||
// The Path to the library in the container image. Empty string when Lockfile scan.
|
||||
// This field is used to convert the result JSON of a `trivy image` using trivy-to-vuls.
|
||||
FilePath string
|
||||
}
|
||||
|
||||
// Scan : scan target library
|
||||
func (s LibraryScanner) Scan() ([]VulnInfo, error) {
|
||||
scanner, err := library.DriverFactory{}.NewDriver(filepath.Base(string(s.Path)))
|
||||
scanner, err := library.NewDriver(s.Type)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("Failed to new a library driver: %w", err)
|
||||
}
|
||||
var vulnerabilities = []VulnInfo{}
|
||||
for _, pkg := range s.Libs {
|
||||
tvulns, err := scanner.Detect(pkg.Name, pkg.Version)
|
||||
tvulns, err := scanner.DetectVulnerabilities(pkg.Name, pkg.Version)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("failed to detect %s vulnerabilities: %w", scanner.Type(), err)
|
||||
}
|
||||
@@ -87,52 +99,57 @@ func (s LibraryScanner) getVulnDetail(tvuln types.DetectedVulnerability) (vinfo
|
||||
|
||||
vinfo.CveID = tvuln.VulnerabilityID
|
||||
vinfo.CveContents = getCveContents(tvuln.VulnerabilityID, vul)
|
||||
if tvuln.FixedVersion != "" {
|
||||
vinfo.LibraryFixedIns = []LibraryFixedIn{
|
||||
{
|
||||
Key: s.GetLibraryKey(),
|
||||
Name: tvuln.PkgName,
|
||||
FixedIn: tvuln.FixedVersion,
|
||||
Path: s.Path,
|
||||
},
|
||||
}
|
||||
vinfo.LibraryFixedIns = []LibraryFixedIn{
|
||||
{
|
||||
Key: s.GetLibraryKey(),
|
||||
Name: tvuln.PkgName,
|
||||
FixedIn: tvuln.FixedVersion,
|
||||
Path: s.LockfilePath,
|
||||
},
|
||||
}
|
||||
return vinfo, nil
|
||||
}
|
||||
|
||||
func getCveContents(cveID string, vul trivyDBTypes.Vulnerability) (contents map[CveContentType]CveContent) {
|
||||
contents = map[CveContentType]CveContent{}
|
||||
func getCveContents(cveID string, vul trivyDBTypes.Vulnerability) (contents map[CveContentType][]CveContent) {
|
||||
contents = map[CveContentType][]CveContent{}
|
||||
refs := []Reference{}
|
||||
for _, refURL := range vul.References {
|
||||
refs = append(refs, Reference{Source: "trivy", Link: refURL})
|
||||
}
|
||||
|
||||
content := CveContent{
|
||||
Type: Trivy,
|
||||
CveID: cveID,
|
||||
Title: vul.Title,
|
||||
Summary: vul.Description,
|
||||
Cvss3Severity: string(vul.Severity),
|
||||
References: refs,
|
||||
contents[Trivy] = []CveContent{
|
||||
{
|
||||
Type: Trivy,
|
||||
CveID: cveID,
|
||||
Title: vul.Title,
|
||||
Summary: vul.Description,
|
||||
Cvss3Severity: string(vul.Severity),
|
||||
References: refs,
|
||||
},
|
||||
}
|
||||
contents[Trivy] = content
|
||||
return contents
|
||||
}
|
||||
|
||||
// LibraryMap is filename and library type
|
||||
var LibraryMap = map[string]string{
|
||||
"package-lock.json": "node",
|
||||
"yarn.lock": "node",
|
||||
"Gemfile.lock": "ruby",
|
||||
"Cargo.lock": "rust",
|
||||
"composer.lock": "php",
|
||||
"Pipfile.lock": "python",
|
||||
"poetry.lock": "python",
|
||||
"package-lock.json": "node",
|
||||
"yarn.lock": "node",
|
||||
"Gemfile.lock": "ruby",
|
||||
"Cargo.lock": "rust",
|
||||
"composer.lock": "php",
|
||||
"Pipfile.lock": "python",
|
||||
"poetry.lock": "python",
|
||||
"packages.lock.json": ".net",
|
||||
"go.sum": "gomod",
|
||||
}
|
||||
|
||||
// GetLibraryKey returns target library key
|
||||
func (s LibraryScanner) GetLibraryKey() string {
|
||||
fileName := filepath.Base(s.Path)
|
||||
fileName := filepath.Base(s.LockfilePath)
|
||||
switch s.Type {
|
||||
case "jar", "war", "ear":
|
||||
return "java"
|
||||
}
|
||||
return LibraryMap[fileName]
|
||||
}
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@ package models
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
func TestLibraryScanners_Find(t *testing.T) {
|
||||
@@ -16,14 +14,14 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name string
|
||||
lss LibraryScanners
|
||||
args args
|
||||
want map[string]types.Library
|
||||
want map[string]Library
|
||||
}{
|
||||
{
|
||||
name: "single file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -32,7 +30,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
want: map[string]Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -43,8 +41,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name: "multi file",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -52,8 +50,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "/pathB",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathB",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.5",
|
||||
@@ -62,7 +60,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libA"},
|
||||
want: map[string]types.Library{
|
||||
want: map[string]Library{
|
||||
"/pathA": {
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -73,8 +71,8 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
name: "miss",
|
||||
lss: LibraryScanners{
|
||||
{
|
||||
Path: "/pathA",
|
||||
Libs: []types.Library{
|
||||
LockfilePath: "/pathA",
|
||||
Libs: []Library{
|
||||
{
|
||||
Name: "libA",
|
||||
Version: "1.0.0",
|
||||
@@ -83,7 +81,7 @@ func TestLibraryScanners_Find(t *testing.T) {
|
||||
},
|
||||
},
|
||||
args: args{"/pathA", "libB"},
|
||||
want: map[string]types.Library{},
|
||||
want: map[string]Library{},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
|
||||
@@ -382,7 +382,7 @@ func Test_IsRaspbianPackage(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_parseListenPorts(t *testing.T) {
|
||||
func Test_NewPortStat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
args string
|
||||
@@ -423,7 +423,7 @@ func Test_parseListenPorts(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error occurred: %s", err)
|
||||
} else if !reflect.DeepEqual(*listenPort, tt.expect) {
|
||||
t.Errorf("base.parseListenPorts() = %v, want %v", *listenPort, tt.expect)
|
||||
t.Errorf("base.NewPortStat() = %v, want %v", *listenPort, tt.expect)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -105,13 +105,12 @@ func (r *ScanResult) FilterInactiveWordPressLibs(detectInactive bool) {
|
||||
return false
|
||||
})
|
||||
r.ScannedCves = filtered
|
||||
return
|
||||
}
|
||||
|
||||
// ReportFileName returns the filename on localhost without extension
|
||||
func (r ScanResult) ReportFileName() (name string) {
|
||||
if r.Container.ContainerID == "" {
|
||||
return fmt.Sprintf("%s", r.ServerName)
|
||||
return r.ServerName
|
||||
}
|
||||
return fmt.Sprintf("%s@%s", r.Container.Name, r.ServerName)
|
||||
}
|
||||
@@ -246,17 +245,21 @@ func (r ScanResult) FormatMetasploitCveSummary() string {
|
||||
|
||||
// FormatAlertSummary returns a summary of CERT alerts
|
||||
func (r ScanResult) FormatAlertSummary() string {
|
||||
jaCnt := 0
|
||||
enCnt := 0
|
||||
cisaCnt := 0
|
||||
uscertCnt := 0
|
||||
jpcertCnt := 0
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if len(vuln.AlertDict.En) > 0 {
|
||||
enCnt += len(vuln.AlertDict.En)
|
||||
if len(vuln.AlertDict.CISA) > 0 {
|
||||
cisaCnt += len(vuln.AlertDict.CISA)
|
||||
}
|
||||
if len(vuln.AlertDict.Ja) > 0 {
|
||||
jaCnt += len(vuln.AlertDict.Ja)
|
||||
if len(vuln.AlertDict.USCERT) > 0 {
|
||||
uscertCnt += len(vuln.AlertDict.USCERT)
|
||||
}
|
||||
if len(vuln.AlertDict.JPCERT) > 0 {
|
||||
jpcertCnt += len(vuln.AlertDict.JPCERT)
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("en: %d, ja: %d alerts", enCnt, jaCnt)
|
||||
return fmt.Sprintf("cisa: %d, uscert: %d, jpcert: %d alerts", cisaCnt, uscertCnt, jpcertCnt)
|
||||
}
|
||||
|
||||
func (r ScanResult) isDisplayUpdatableNum(mode config.ScanMode) bool {
|
||||
@@ -291,12 +294,11 @@ func (r ScanResult) IsContainer() bool {
|
||||
}
|
||||
|
||||
// RemoveRaspbianPackFromResult is for Raspberry Pi and removes the Raspberry Pi dedicated package from ScanResult.
|
||||
func (r ScanResult) RemoveRaspbianPackFromResult() ScanResult {
|
||||
func (r ScanResult) RemoveRaspbianPackFromResult() *ScanResult {
|
||||
if r.Family != constant.Raspbian {
|
||||
return r
|
||||
return &r
|
||||
}
|
||||
|
||||
result := r
|
||||
packs := make(Packages)
|
||||
for _, pack := range r.Packages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
@@ -307,14 +309,13 @@ func (r ScanResult) RemoveRaspbianPackFromResult() ScanResult {
|
||||
for _, pack := range r.SrcPackages {
|
||||
if !IsRaspbianPackage(pack.Name, pack.Version) {
|
||||
srcPacks[pack.Name] = pack
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
result.Packages = packs
|
||||
result.SrcPackages = srcPacks
|
||||
r.Packages = packs
|
||||
r.SrcPackages = srcPacks
|
||||
|
||||
return result
|
||||
return &r
|
||||
}
|
||||
|
||||
// ClearFields clears a given fields of ScanResult
|
||||
@@ -375,44 +376,59 @@ func (r *ScanResult) CheckEOL() {
|
||||
}
|
||||
}
|
||||
|
||||
// SortForJSONOutput sort list elements in the ScanResult to diff in integration-test
|
||||
func (r *ScanResult) SortForJSONOutput() {
|
||||
for k, v := range r.Packages {
|
||||
sort.SliceStable(v.AffectedProcs, func(i, j int) bool {
|
||||
sort.Slice(v.AffectedProcs, func(i, j int) bool {
|
||||
return v.AffectedProcs[i].PID < v.AffectedProcs[j].PID
|
||||
})
|
||||
sort.SliceStable(v.NeedRestartProcs, func(i, j int) bool {
|
||||
sort.Slice(v.NeedRestartProcs, func(i, j int) bool {
|
||||
return v.NeedRestartProcs[i].PID < v.NeedRestartProcs[j].PID
|
||||
})
|
||||
r.Packages[k] = v
|
||||
}
|
||||
for i, v := range r.LibraryScanners {
|
||||
sort.Slice(v.Libs, func(i, j int) bool {
|
||||
switch strings.Compare(v.Libs[i].Name, v.Libs[j].Name) {
|
||||
case -1:
|
||||
return true
|
||||
case 1:
|
||||
return false
|
||||
}
|
||||
return v.Libs[i].Version < v.Libs[j].Version
|
||||
|
||||
})
|
||||
r.LibraryScanners[i] = v
|
||||
}
|
||||
|
||||
for k, v := range r.ScannedCves {
|
||||
sort.SliceStable(v.AffectedPackages, func(i, j int) bool {
|
||||
sort.Slice(v.AffectedPackages, func(i, j int) bool {
|
||||
return v.AffectedPackages[i].Name < v.AffectedPackages[j].Name
|
||||
})
|
||||
sort.SliceStable(v.DistroAdvisories, func(i, j int) bool {
|
||||
sort.Slice(v.DistroAdvisories, func(i, j int) bool {
|
||||
return v.DistroAdvisories[i].AdvisoryID < v.DistroAdvisories[j].AdvisoryID
|
||||
})
|
||||
sort.SliceStable(v.Exploits, func(i, j int) bool {
|
||||
return v.Exploits[i].ID < v.Exploits[j].ID
|
||||
sort.Slice(v.Exploits, func(i, j int) bool {
|
||||
return v.Exploits[i].URL < v.Exploits[j].URL
|
||||
})
|
||||
sort.SliceStable(v.Metasploits, func(i, j int) bool {
|
||||
sort.Slice(v.Metasploits, func(i, j int) bool {
|
||||
return v.Metasploits[i].Name < v.Metasploits[j].Name
|
||||
})
|
||||
for kk, vv := range v.CveContents {
|
||||
sort.SliceStable(vv.References, func(i, j int) bool {
|
||||
return vv.References[i].Link < vv.References[j].Link
|
||||
})
|
||||
v.CveContents[kk] = vv
|
||||
}
|
||||
|
||||
sort.SliceStable(v.AlertDict.En, func(i, j int) bool {
|
||||
return v.AlertDict.En[i].Title < v.AlertDict.En[j].Title
|
||||
})
|
||||
sort.SliceStable(v.AlertDict.Ja, func(i, j int) bool {
|
||||
return v.AlertDict.Ja[i].Title < v.AlertDict.Ja[j].Title
|
||||
sort.Slice(v.Mitigations, func(i, j int) bool {
|
||||
return v.Mitigations[i].URL < v.Mitigations[j].URL
|
||||
})
|
||||
|
||||
v.CveContents.Sort()
|
||||
|
||||
sort.Slice(v.AlertDict.USCERT, func(i, j int) bool {
|
||||
return v.AlertDict.USCERT[i].Title < v.AlertDict.USCERT[j].Title
|
||||
})
|
||||
sort.Slice(v.AlertDict.JPCERT, func(i, j int) bool {
|
||||
return v.AlertDict.JPCERT[i].Title < v.AlertDict.JPCERT[j].Title
|
||||
})
|
||||
sort.Slice(v.AlertDict.CISA, func(i, j int) bool {
|
||||
return v.AlertDict.CISA[i].Title < v.AlertDict.CISA[j].Title
|
||||
})
|
||||
r.ScannedCves[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,6 +56,16 @@ func TestIsDisplayUpdatableNum(t *testing.T) {
|
||||
family: constant.CentOS,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: constant.Alma,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: constant.Rocky,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: constant.Amazon,
|
||||
@@ -76,6 +86,11 @@ func TestIsDisplayUpdatableNum(t *testing.T) {
|
||||
family: constant.Alpine,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
mode: []byte{config.Fast},
|
||||
family: constant.Fedora,
|
||||
expected: true,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
@@ -94,6 +109,55 @@ func TestIsDisplayUpdatableNum(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemoveRaspbianPackFromResult(t *testing.T) {
|
||||
var tests = []struct {
|
||||
in ScanResult
|
||||
expected ScanResult
|
||||
}{
|
||||
{
|
||||
in: ScanResult{
|
||||
Family: constant.Raspbian,
|
||||
Packages: Packages{
|
||||
"apt": Package{Name: "apt", Version: "1.8.2.1"},
|
||||
"libraspberrypi-dev": Package{Name: "libraspberrypi-dev", Version: "1.20200811-1"},
|
||||
},
|
||||
SrcPackages: SrcPackages{},
|
||||
},
|
||||
expected: ScanResult{
|
||||
Family: constant.Raspbian,
|
||||
Packages: Packages{
|
||||
"apt": Package{Name: "apt", Version: "1.8.2.1"},
|
||||
},
|
||||
SrcPackages: SrcPackages{},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: ScanResult{
|
||||
Family: constant.Debian,
|
||||
Packages: Packages{
|
||||
"apt": Package{Name: "apt", Version: "1.8.2.1"},
|
||||
},
|
||||
SrcPackages: SrcPackages{},
|
||||
},
|
||||
expected: ScanResult{
|
||||
Family: constant.Debian,
|
||||
Packages: Packages{
|
||||
"apt": Package{Name: "apt", Version: "1.8.2.1"},
|
||||
},
|
||||
SrcPackages: SrcPackages{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
r := tt.in
|
||||
r = *r.RemoveRaspbianPackFromResult()
|
||||
if !reflect.DeepEqual(r, tt.expected) {
|
||||
t.Errorf("[%d] expected %+v, actual %+v", i, tt.expected, r)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScanResult_Sort(t *testing.T) {
|
||||
type fields struct {
|
||||
Packages Packages
|
||||
@@ -131,33 +195,37 @@ func TestScanResult_Sort(t *testing.T) {
|
||||
{AdvisoryID: "adv-2"},
|
||||
},
|
||||
Exploits: []Exploit{
|
||||
{ID: "a"},
|
||||
{ID: "b"},
|
||||
{URL: "a"},
|
||||
{URL: "b"},
|
||||
},
|
||||
Metasploits: []Metasploit{
|
||||
{Name: "a"},
|
||||
{Name: "b"},
|
||||
},
|
||||
CveContents: CveContents{
|
||||
"nvd": CveContent{
|
||||
"nvd": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
"jvn": CveContent{
|
||||
"jvn": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
AlertDict: AlertDict{
|
||||
En: []Alert{
|
||||
USCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
Ja: []Alert{
|
||||
JPCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
CISA: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
@@ -190,33 +258,37 @@ func TestScanResult_Sort(t *testing.T) {
|
||||
{AdvisoryID: "adv-2"},
|
||||
},
|
||||
Exploits: []Exploit{
|
||||
{ID: "a"},
|
||||
{ID: "b"},
|
||||
{URL: "a"},
|
||||
{URL: "b"},
|
||||
},
|
||||
Metasploits: []Metasploit{
|
||||
{Name: "a"},
|
||||
{Name: "b"},
|
||||
},
|
||||
CveContents: CveContents{
|
||||
"nvd": CveContent{
|
||||
"nvd": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
"jvn": CveContent{
|
||||
"jvn": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
AlertDict: AlertDict{
|
||||
En: []Alert{
|
||||
USCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
Ja: []Alert{
|
||||
JPCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
CISA: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
@@ -252,33 +324,37 @@ func TestScanResult_Sort(t *testing.T) {
|
||||
{AdvisoryID: "adv-1"},
|
||||
},
|
||||
Exploits: []Exploit{
|
||||
{ID: "b"},
|
||||
{ID: "a"},
|
||||
{URL: "b"},
|
||||
{URL: "a"},
|
||||
},
|
||||
Metasploits: []Metasploit{
|
||||
{Name: "b"},
|
||||
{Name: "a"},
|
||||
},
|
||||
CveContents: CveContents{
|
||||
"nvd": CveContent{
|
||||
"nvd": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "b"},
|
||||
Reference{Link: "a"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
"jvn": CveContent{
|
||||
"jvn": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "b"},
|
||||
Reference{Link: "a"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
AlertDict: AlertDict{
|
||||
En: []Alert{
|
||||
USCERT: []Alert{
|
||||
{Title: "b"},
|
||||
{Title: "a"},
|
||||
},
|
||||
Ja: []Alert{
|
||||
JPCERT: []Alert{
|
||||
{Title: "b"},
|
||||
{Title: "a"},
|
||||
},
|
||||
CISA: []Alert{
|
||||
{Title: "b"},
|
||||
{Title: "a"},
|
||||
},
|
||||
@@ -311,36 +387,149 @@ func TestScanResult_Sort(t *testing.T) {
|
||||
{AdvisoryID: "adv-2"},
|
||||
},
|
||||
Exploits: []Exploit{
|
||||
{ID: "a"},
|
||||
{ID: "b"},
|
||||
{URL: "a"},
|
||||
{URL: "b"},
|
||||
},
|
||||
Metasploits: []Metasploit{
|
||||
{Name: "a"},
|
||||
{Name: "b"},
|
||||
},
|
||||
CveContents: CveContents{
|
||||
"nvd": CveContent{
|
||||
"nvd": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
"jvn": CveContent{
|
||||
"jvn": []CveContent{{
|
||||
References: References{
|
||||
Reference{Link: "a"},
|
||||
Reference{Link: "b"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
AlertDict: AlertDict{
|
||||
En: []Alert{
|
||||
USCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
Ja: []Alert{
|
||||
JPCERT: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
CISA: []Alert{
|
||||
{Title: "a"},
|
||||
{Title: "b"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss v3",
|
||||
fields: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{Cvss3Score: 3},
|
||||
{Cvss3Score: 10},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{Cvss3Score: 10},
|
||||
{Cvss3Score: 3},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2, sourceLink",
|
||||
fields: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2021/JVNDB-2021-001210.html",
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 3,
|
||||
SourceLink: "https://jvndb.jvn.jp/ja/contents/2023/JVNDB-2023-001210.html",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sort JVN by cvss3, cvss2",
|
||||
fields: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: fields{
|
||||
ScannedCves: VulnInfos{
|
||||
"CVE-2014-3591": VulnInfo{
|
||||
CveContents: CveContents{
|
||||
"jvn": []CveContent{
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 10,
|
||||
},
|
||||
{
|
||||
Cvss3Score: 3,
|
||||
Cvss2Score: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
195
models/utils.go
195
models/utils.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package models
|
||||
@@ -5,116 +6,120 @@ package models
|
||||
import (
|
||||
"strings"
|
||||
|
||||
cvedict "github.com/kotakanbe/go-cve-dictionary/models"
|
||||
cvedict "github.com/vulsio/go-cve-dictionary/models"
|
||||
)
|
||||
|
||||
// ConvertJvnToModel convert JVN to CveContent
|
||||
func ConvertJvnToModel(cveID string, jvn *cvedict.Jvn) *CveContent {
|
||||
if jvn == nil {
|
||||
return nil
|
||||
}
|
||||
// var cpes = []Cpe{}
|
||||
// for _, c := range jvn.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
func ConvertJvnToModel(cveID string, jvns []cvedict.Jvn) []CveContent {
|
||||
cves := []CveContent{}
|
||||
for _, jvn := range jvns {
|
||||
// cpes := []Cpe{}
|
||||
// for _, c := range jvn.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
refs := []Reference{}
|
||||
for _, r := range jvn.References {
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
})
|
||||
}
|
||||
refs := []Reference{}
|
||||
for _, r := range jvn.References {
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
})
|
||||
}
|
||||
|
||||
return &CveContent{
|
||||
Type: Jvn,
|
||||
CveID: cveID,
|
||||
Title: jvn.Title,
|
||||
Summary: jvn.Summary,
|
||||
Cvss2Score: jvn.Cvss2.BaseScore,
|
||||
Cvss2Vector: jvn.Cvss2.VectorString,
|
||||
Cvss2Severity: jvn.Cvss2.Severity,
|
||||
Cvss3Score: jvn.Cvss3.BaseScore,
|
||||
Cvss3Vector: jvn.Cvss3.VectorString,
|
||||
Cvss3Severity: jvn.Cvss3.BaseSeverity,
|
||||
SourceLink: jvn.JvnLink,
|
||||
// Cpes: cpes,
|
||||
References: refs,
|
||||
Published: jvn.PublishedDate,
|
||||
LastModified: jvn.LastModifiedDate,
|
||||
cve := CveContent{
|
||||
Type: Jvn,
|
||||
CveID: cveID,
|
||||
Title: jvn.Title,
|
||||
Summary: jvn.Summary,
|
||||
Cvss2Score: jvn.Cvss2.BaseScore,
|
||||
Cvss2Vector: jvn.Cvss2.VectorString,
|
||||
Cvss2Severity: jvn.Cvss2.Severity,
|
||||
Cvss3Score: jvn.Cvss3.BaseScore,
|
||||
Cvss3Vector: jvn.Cvss3.VectorString,
|
||||
Cvss3Severity: jvn.Cvss3.BaseSeverity,
|
||||
SourceLink: jvn.JvnLink,
|
||||
// Cpes: cpes,
|
||||
References: refs,
|
||||
Published: jvn.PublishedDate,
|
||||
LastModified: jvn.LastModifiedDate,
|
||||
}
|
||||
cves = append(cves, cve)
|
||||
}
|
||||
return cves
|
||||
}
|
||||
|
||||
// ConvertNvdJSONToModel convert NVD to CveContent
|
||||
func ConvertNvdJSONToModel(cveID string, nvd *cvedict.NvdJSON) (*CveContent, []Exploit, []Mitigation) {
|
||||
if nvd == nil {
|
||||
return nil, nil, nil
|
||||
}
|
||||
// var cpes = []Cpe{}
|
||||
// for _, c := range nvd.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
// ConvertNvdToModel convert NVD to CveContent
|
||||
func ConvertNvdToModel(cveID string, nvds []cvedict.Nvd) ([]CveContent, []Exploit, []Mitigation) {
|
||||
cves := []CveContent{}
|
||||
refs := []Reference{}
|
||||
exploits := []Exploit{}
|
||||
mitigations := []Mitigation{}
|
||||
for _, r := range nvd.References {
|
||||
var tags []string
|
||||
if 0 < len(r.Tags) {
|
||||
tags = strings.Split(r.Tags, ",")
|
||||
}
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
Tags: tags,
|
||||
})
|
||||
if strings.Contains(r.Tags, "Exploit") {
|
||||
exploits = append(exploits, Exploit{
|
||||
//TODO Add const to here
|
||||
// https://github.com/vulsio/go-exploitdb/blob/master/models/exploit.go#L13-L18
|
||||
ExploitType: "nvd",
|
||||
URL: r.Link,
|
||||
for _, nvd := range nvds {
|
||||
// cpes := []Cpe{}
|
||||
// for _, c := range nvd.Cpes {
|
||||
// cpes = append(cpes, Cpe{
|
||||
// FormattedString: c.FormattedString,
|
||||
// URI: c.URI,
|
||||
// })
|
||||
// }
|
||||
|
||||
for _, r := range nvd.References {
|
||||
var tags []string
|
||||
if 0 < len(r.Tags) {
|
||||
tags = strings.Split(r.Tags, ",")
|
||||
}
|
||||
refs = append(refs, Reference{
|
||||
Link: r.Link,
|
||||
Source: r.Source,
|
||||
Tags: tags,
|
||||
})
|
||||
if strings.Contains(r.Tags, "Exploit") {
|
||||
exploits = append(exploits, Exploit{
|
||||
//TODO Add const to here
|
||||
// https://github.com/vulsio/go-exploitdb/blob/master/models/exploit.go#L13-L18
|
||||
ExploitType: "nvd",
|
||||
URL: r.Link,
|
||||
})
|
||||
}
|
||||
if strings.Contains(r.Tags, "Mitigation") {
|
||||
mitigations = append(mitigations, Mitigation{
|
||||
CveContentType: Nvd,
|
||||
URL: r.Link,
|
||||
})
|
||||
}
|
||||
}
|
||||
if strings.Contains(r.Tags, "Mitigation") {
|
||||
mitigations = append(mitigations, Mitigation{
|
||||
CveContentType: Nvd,
|
||||
URL: r.Link,
|
||||
})
|
||||
|
||||
cweIDs := []string{}
|
||||
for _, cid := range nvd.Cwes {
|
||||
cweIDs = append(cweIDs, cid.CweID)
|
||||
}
|
||||
}
|
||||
|
||||
cweIDs := []string{}
|
||||
for _, cid := range nvd.Cwes {
|
||||
cweIDs = append(cweIDs, cid.CweID)
|
||||
}
|
||||
desc := []string{}
|
||||
for _, d := range nvd.Descriptions {
|
||||
desc = append(desc, d.Value)
|
||||
}
|
||||
|
||||
desc := []string{}
|
||||
for _, d := range nvd.Descriptions {
|
||||
desc = append(desc, d.Value)
|
||||
cve := CveContent{
|
||||
Type: Nvd,
|
||||
CveID: cveID,
|
||||
Summary: strings.Join(desc, "\n"),
|
||||
Cvss2Score: nvd.Cvss2.BaseScore,
|
||||
Cvss2Vector: nvd.Cvss2.VectorString,
|
||||
Cvss2Severity: nvd.Cvss2.Severity,
|
||||
Cvss3Score: nvd.Cvss3.BaseScore,
|
||||
Cvss3Vector: nvd.Cvss3.VectorString,
|
||||
Cvss3Severity: nvd.Cvss3.BaseSeverity,
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/" + cveID,
|
||||
// Cpes: cpes,
|
||||
CweIDs: cweIDs,
|
||||
References: refs,
|
||||
Published: nvd.PublishedDate,
|
||||
LastModified: nvd.LastModifiedDate,
|
||||
}
|
||||
cves = append(cves, cve)
|
||||
}
|
||||
|
||||
return &CveContent{
|
||||
Type: Nvd,
|
||||
CveID: cveID,
|
||||
Summary: strings.Join(desc, "\n"),
|
||||
Cvss2Score: nvd.Cvss2.BaseScore,
|
||||
Cvss2Vector: nvd.Cvss2.VectorString,
|
||||
Cvss2Severity: nvd.Cvss2.Severity,
|
||||
Cvss3Score: nvd.Cvss3.BaseScore,
|
||||
Cvss3Vector: nvd.Cvss3.VectorString,
|
||||
Cvss3Severity: nvd.Cvss3.BaseSeverity,
|
||||
SourceLink: "https://nvd.nist.gov/vuln/detail/" + cveID,
|
||||
// Cpes: cpes,
|
||||
CweIDs: cweIDs,
|
||||
References: refs,
|
||||
Published: nvd.PublishedDate,
|
||||
LastModified: nvd.LastModifiedDate,
|
||||
}, exploits, mitigations
|
||||
return cves, exploits, mitigations
|
||||
}
|
||||
|
||||
@@ -28,31 +28,46 @@ func (v VulnInfos) Find(f func(VulnInfo) bool) VulnInfos {
|
||||
}
|
||||
|
||||
// FilterByCvssOver return scored vulnerabilities
|
||||
func (v VulnInfos) FilterByCvssOver(over float64) VulnInfos {
|
||||
func (v VulnInfos) FilterByCvssOver(over float64) (_ VulnInfos, nFiltered int) {
|
||||
return v.Find(func(v VulnInfo) bool {
|
||||
if over <= v.MaxCvssScore().Value.Score {
|
||||
return true
|
||||
}
|
||||
nFiltered++
|
||||
return false
|
||||
})
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// FilterByConfidenceOver scored vulnerabilities
|
||||
func (v VulnInfos) FilterByConfidenceOver(over int) (_ VulnInfos, nFiltered int) {
|
||||
return v.Find(func(v VulnInfo) bool {
|
||||
for _, c := range v.Confidences {
|
||||
if over <= c.Score {
|
||||
return true
|
||||
}
|
||||
}
|
||||
nFiltered++
|
||||
return false
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// FilterIgnoreCves filter function.
|
||||
func (v VulnInfos) FilterIgnoreCves(ignoreCveIDs []string) VulnInfos {
|
||||
func (v VulnInfos) FilterIgnoreCves(ignoreCveIDs []string) (_ VulnInfos, nFiltered int) {
|
||||
return v.Find(func(v VulnInfo) bool {
|
||||
for _, c := range ignoreCveIDs {
|
||||
if v.CveID == c {
|
||||
nFiltered++
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// FilterUnfixed filter unfixed CVE-IDs
|
||||
func (v VulnInfos) FilterUnfixed(ignoreUnfixed bool) VulnInfos {
|
||||
func (v VulnInfos) FilterUnfixed(ignoreUnfixed bool) (_ VulnInfos, nFiltered int) {
|
||||
if !ignoreUnfixed {
|
||||
return v
|
||||
return v, 0
|
||||
}
|
||||
return v.Find(func(v VulnInfo) bool {
|
||||
// Report cves detected by CPE because Vuls can't know 'fixed' or 'unfixed'
|
||||
@@ -63,24 +78,26 @@ func (v VulnInfos) FilterUnfixed(ignoreUnfixed bool) VulnInfos {
|
||||
for _, p := range v.AffectedPackages {
|
||||
NotFixedAll = NotFixedAll && p.NotFixedYet
|
||||
}
|
||||
if NotFixedAll {
|
||||
nFiltered++
|
||||
}
|
||||
return !NotFixedAll
|
||||
})
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// FilterIgnorePkgs is filter function.
|
||||
func (v VulnInfos) FilterIgnorePkgs(ignorePkgsRegexps []string) VulnInfos {
|
||||
func (v VulnInfos) FilterIgnorePkgs(ignorePkgsRegexps []string) (_ VulnInfos, nFiltered int) {
|
||||
regexps := []*regexp.Regexp{}
|
||||
for _, pkgRegexp := range ignorePkgsRegexps {
|
||||
re, err := regexp.Compile(pkgRegexp)
|
||||
if err != nil {
|
||||
logging.Log.Warnf("Failed to parse %s. err: %+v", pkgRegexp, err)
|
||||
continue
|
||||
} else {
|
||||
regexps = append(regexps, re)
|
||||
}
|
||||
regexps = append(regexps, re)
|
||||
}
|
||||
if len(regexps) == 0 {
|
||||
return v
|
||||
return v, 0
|
||||
}
|
||||
|
||||
return v.Find(func(v VulnInfo) bool {
|
||||
@@ -98,19 +115,21 @@ func (v VulnInfos) FilterIgnorePkgs(ignorePkgsRegexps []string) VulnInfos {
|
||||
return true
|
||||
}
|
||||
}
|
||||
nFiltered++
|
||||
return false
|
||||
})
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// FindScoredVulns return scored vulnerabilities
|
||||
func (v VulnInfos) FindScoredVulns() VulnInfos {
|
||||
func (v VulnInfos) FindScoredVulns() (_ VulnInfos, nFiltered int) {
|
||||
return v.Find(func(vv VulnInfo) bool {
|
||||
if 0 < vv.MaxCvss2Score().Value.Score ||
|
||||
0 < vv.MaxCvss3Score().Value.Score {
|
||||
return true
|
||||
}
|
||||
nFiltered++
|
||||
return false
|
||||
})
|
||||
}), nFiltered
|
||||
}
|
||||
|
||||
// ToSortedSlice returns slice of VulnInfos that is sorted by Score, CVE-ID
|
||||
@@ -157,7 +176,7 @@ func (v VulnInfos) CountGroupBySeverity() map[string]int {
|
||||
func (v VulnInfos) FormatCveSummary() string {
|
||||
m := v.CountGroupBySeverity()
|
||||
line := fmt.Sprintf("Total: %d (Critical:%d High:%d Medium:%d Low:%d ?:%d)",
|
||||
m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
|
||||
m["Critical"]+m["High"]+m["Medium"]+m["Low"]+m["Unknown"],
|
||||
m["Critical"], m["High"], m["Medium"], m["Low"], m["Unknown"])
|
||||
|
||||
nPlus, nMinus := v.CountDiff()
|
||||
@@ -222,7 +241,6 @@ func (ps PackageFixStatuses) Sort() {
|
||||
sort.Slice(ps, func(i, j int) bool {
|
||||
return ps[i].Name < ps[j].Name
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// PackageFixStatus has name and other status about the package
|
||||
@@ -238,7 +256,7 @@ type VulnInfo struct {
|
||||
CveID string `json:"cveID,omitempty"`
|
||||
Confidences Confidences `json:"confidences,omitempty"`
|
||||
AffectedPackages PackageFixStatuses `json:"affectedPackages,omitempty"`
|
||||
DistroAdvisories DistroAdvisories `json:"distroAdvisories,omitempty"` // for Amazon, RHEL, FreeBSD
|
||||
DistroAdvisories DistroAdvisories `json:"distroAdvisories,omitempty"` // for Amazon, RHEL, Fedora, FreeBSD
|
||||
CveContents CveContents `json:"cveContents,omitempty"`
|
||||
Exploits []Exploit `json:"exploits,omitempty"`
|
||||
Metasploits []Metasploit `json:"metasploits,omitempty"`
|
||||
@@ -341,36 +359,52 @@ func (v VulnInfo) CveIDDiffFormat() string {
|
||||
if v.DiffStatus != "" {
|
||||
return fmt.Sprintf("%s %s", v.DiffStatus, v.CveID)
|
||||
}
|
||||
return fmt.Sprintf("%s", v.CveID)
|
||||
return v.CveID
|
||||
}
|
||||
|
||||
// Titles returns title (TUI)
|
||||
func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
|
||||
if lang == "ja" {
|
||||
if cont, found := v.CveContents[Jvn]; found && cont.Title != "" {
|
||||
values = append(values, CveContentStr{Jvn, cont.Title})
|
||||
if conts, found := v.CveContents[Jvn]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Title != "" {
|
||||
values = append(values, CveContentStr{Jvn, cont.Title})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RedHat API has one line title.
|
||||
if cont, found := v.CveContents[RedHatAPI]; found && cont.Title != "" {
|
||||
values = append(values, CveContentStr{RedHatAPI, cont.Title})
|
||||
if conts, found := v.CveContents[RedHatAPI]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Title != "" {
|
||||
values = append(values, CveContentStr{RedHatAPI, cont.Title})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GitHub security alerts has a title.
|
||||
if cont, found := v.CveContents[GitHub]; found && cont.Title != "" {
|
||||
values = append(values, CveContentStr{GitHub, cont.Title})
|
||||
if conts, found := v.CveContents[GitHub]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Title != "" {
|
||||
values = append(values, CveContentStr{GitHub, cont.Title})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
order := CveContentTypes{Trivy, Nvd, NewCveContentType(myFamily)}
|
||||
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found && cont.Summary != "" {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
if conts, found := v.CveContents[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Summary != "" {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -393,23 +427,31 @@ func (v VulnInfo) Titles(lang, myFamily string) (values []CveContentStr) {
|
||||
// Summaries returns summaries
|
||||
func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
|
||||
if lang == "ja" {
|
||||
if cont, found := v.CveContents[Jvn]; found && cont.Summary != "" {
|
||||
summary := cont.Title
|
||||
summary += "\n" + strings.Replace(
|
||||
strings.Replace(cont.Summary, "\n", " ", -1), "\r", " ", -1)
|
||||
values = append(values, CveContentStr{Jvn, summary})
|
||||
if conts, found := v.CveContents[Jvn]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Summary != "" {
|
||||
summary := cont.Title
|
||||
summary += "\n" + strings.Replace(
|
||||
strings.Replace(cont.Summary, "\n", " ", -1), "\r", " ", -1)
|
||||
values = append(values, CveContentStr{Jvn, summary})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
order := CveContentTypes{Trivy, NewCveContentType(myFamily), Nvd, GitHub}
|
||||
order = append(order, AllCveContetTypes.Except(append(order, Jvn)...)...)
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found && cont.Summary != "" {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
if conts, found := v.CveContents[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Summary != "" {
|
||||
summary := strings.Replace(cont.Summary, "\n", " ", -1)
|
||||
values = append(values, CveContentStr{
|
||||
Type: ctype,
|
||||
Value: summary,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -420,11 +462,15 @@ func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
|
||||
})
|
||||
}
|
||||
|
||||
if v, ok := v.CveContents[WpScan]; ok {
|
||||
values = append(values, CveContentStr{
|
||||
Type: WpScan,
|
||||
Value: v.Title,
|
||||
})
|
||||
if conts, ok := v.CveContents[WpScan]; ok {
|
||||
for _, cont := range conts {
|
||||
if cont.Title != "" {
|
||||
values = append(values, CveContentStr{
|
||||
Type: WpScan,
|
||||
Value: cont.Title,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(values) == 0 {
|
||||
@@ -441,20 +487,22 @@ func (v VulnInfo) Summaries(lang, myFamily string) (values []CveContentStr) {
|
||||
func (v VulnInfo) Cvss2Scores() (values []CveContentCvss) {
|
||||
order := []CveContentType{RedHatAPI, RedHat, Nvd, Jvn}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found {
|
||||
if cont.Cvss2Score == 0 && cont.Cvss2Severity == "" {
|
||||
continue
|
||||
if conts, found := v.CveContents[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Cvss2Score == 0 && cont.Cvss2Severity == "" {
|
||||
continue
|
||||
}
|
||||
// https://nvd.nist.gov/vuln-metrics/cvss
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS2,
|
||||
Score: cont.Cvss2Score,
|
||||
Vector: cont.Cvss2Vector,
|
||||
Severity: strings.ToUpper(cont.Cvss2Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
// https://nvd.nist.gov/vuln-metrics/cvss
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS2,
|
||||
Score: cont.Cvss2Score,
|
||||
Vector: cont.Cvss2Vector,
|
||||
Severity: strings.ToUpper(cont.Cvss2Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
return
|
||||
@@ -464,34 +512,40 @@ func (v VulnInfo) Cvss2Scores() (values []CveContentCvss) {
|
||||
func (v VulnInfo) Cvss3Scores() (values []CveContentCvss) {
|
||||
order := []CveContentType{RedHatAPI, RedHat, Nvd, Jvn}
|
||||
for _, ctype := range order {
|
||||
if cont, found := v.CveContents[ctype]; found {
|
||||
if cont.Cvss3Score == 0 && cont.Cvss3Severity == "" {
|
||||
continue
|
||||
if conts, found := v.CveContents[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Cvss3Score == 0 && cont.Cvss3Severity == "" {
|
||||
continue
|
||||
}
|
||||
// https://nvd.nist.gov/vuln-metrics/cvss
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS3,
|
||||
Score: cont.Cvss3Score,
|
||||
Vector: cont.Cvss3Vector,
|
||||
Severity: strings.ToUpper(cont.Cvss3Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
// https://nvd.nist.gov/vuln-metrics/cvss
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS3,
|
||||
Score: cont.Cvss3Score,
|
||||
Vector: cont.Cvss3Vector,
|
||||
Severity: strings.ToUpper(cont.Cvss3Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, ctype := range []CveContentType{Debian, DebianSecurityTracker, Ubuntu, Amazon, Trivy, GitHub, WpScan} {
|
||||
if cont, found := v.CveContents[ctype]; found && cont.Cvss3Severity != "" {
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS3,
|
||||
Score: severityToCvssScoreRoughly(cont.Cvss3Severity),
|
||||
CalculatedBySeverity: true,
|
||||
Severity: strings.ToUpper(cont.Cvss3Severity),
|
||||
},
|
||||
})
|
||||
if conts, found := v.CveContents[ctype]; found {
|
||||
for _, cont := range conts {
|
||||
if cont.Cvss3Severity != "" {
|
||||
values = append(values, CveContentCvss{
|
||||
Type: ctype,
|
||||
Value: Cvss{
|
||||
Type: CVSS3,
|
||||
Score: severityToCvssScoreRoughly(cont.Cvss3Severity),
|
||||
CalculatedBySeverity: true,
|
||||
Severity: strings.ToUpper(cont.Cvss3Severity),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -553,24 +607,28 @@ func (v VulnInfo) MaxCvss2Score() CveContentCvss {
|
||||
|
||||
// AttackVector returns attack vector string
|
||||
func (v VulnInfo) AttackVector() string {
|
||||
for _, cnt := range v.CveContents {
|
||||
if strings.HasPrefix(cnt.Cvss2Vector, "AV:N") ||
|
||||
strings.Contains(cnt.Cvss3Vector, "AV:N") {
|
||||
return "AV:N"
|
||||
} else if strings.HasPrefix(cnt.Cvss2Vector, "AV:A") ||
|
||||
strings.Contains(cnt.Cvss3Vector, "AV:A") {
|
||||
return "AV:A"
|
||||
} else if strings.HasPrefix(cnt.Cvss2Vector, "AV:L") ||
|
||||
strings.Contains(cnt.Cvss3Vector, "AV:L") {
|
||||
return "AV:L"
|
||||
} else if strings.Contains(cnt.Cvss3Vector, "AV:P") {
|
||||
// no AV:P in CVSS v2
|
||||
return "AV:P"
|
||||
for _, conts := range v.CveContents {
|
||||
for _, cont := range conts {
|
||||
if strings.HasPrefix(cont.Cvss2Vector, "AV:N") ||
|
||||
strings.Contains(cont.Cvss3Vector, "AV:N") {
|
||||
return "AV:N"
|
||||
} else if strings.HasPrefix(cont.Cvss2Vector, "AV:A") ||
|
||||
strings.Contains(cont.Cvss3Vector, "AV:A") {
|
||||
return "AV:A"
|
||||
} else if strings.HasPrefix(cont.Cvss2Vector, "AV:L") ||
|
||||
strings.Contains(cont.Cvss3Vector, "AV:L") {
|
||||
return "AV:L"
|
||||
} else if strings.Contains(cont.Cvss3Vector, "AV:P") {
|
||||
// no AV:P in CVSS v2
|
||||
return "AV:P"
|
||||
}
|
||||
}
|
||||
}
|
||||
if cont, found := v.CveContents[DebianSecurityTracker]; found {
|
||||
if attackRange, found := cont.Optional["attack range"]; found {
|
||||
return attackRange
|
||||
if conts, found := v.CveContents[DebianSecurityTracker]; found {
|
||||
for _, cont := range conts {
|
||||
if attackRange, found := cont.Optional["attack range"]; found {
|
||||
return attackRange
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
@@ -755,18 +813,28 @@ type Mitigation struct {
|
||||
URL string `json:"url,omitempty"`
|
||||
}
|
||||
|
||||
// AlertDict has target cve JPCERT and USCERT alert data
|
||||
// AlertDict has target cve JPCERT, USCERT and CISA alert data
|
||||
type AlertDict struct {
|
||||
Ja []Alert `json:"ja"`
|
||||
En []Alert `json:"en"`
|
||||
CISA []Alert `json:"cisa"`
|
||||
JPCERT []Alert `json:"jpcert"`
|
||||
USCERT []Alert `json:"uscert"`
|
||||
}
|
||||
|
||||
// IsEmpty checks if the content of AlertDict is empty
|
||||
func (a AlertDict) IsEmpty() bool {
|
||||
return len(a.CISA) == 0 && len(a.JPCERT) == 0 && len(a.USCERT) == 0
|
||||
}
|
||||
|
||||
// FormatSource returns which source has this alert
|
||||
func (a AlertDict) FormatSource() string {
|
||||
if len(a.En) != 0 || len(a.Ja) != 0 {
|
||||
return "CERT"
|
||||
var s []string
|
||||
if len(a.CISA) != 0 {
|
||||
s = append(s, "CISA")
|
||||
}
|
||||
return ""
|
||||
if len(a.USCERT) != 0 || len(a.JPCERT) != 0 {
|
||||
s = append(s, "CERT")
|
||||
}
|
||||
return strings.Join(s, "/")
|
||||
}
|
||||
|
||||
// Confidences is a list of Confidence
|
||||
@@ -808,53 +876,56 @@ func (c Confidence) String() string {
|
||||
type DetectionMethod string
|
||||
|
||||
const (
|
||||
// CpeNameMatchStr is a String representation of CpeNameMatch
|
||||
CpeNameMatchStr = "CpeNameMatch"
|
||||
// NvdExactVersionMatchStr :
|
||||
NvdExactVersionMatchStr = "NvdExactVersionMatch"
|
||||
|
||||
// YumUpdateSecurityMatchStr is a String representation of YumUpdateSecurityMatch
|
||||
YumUpdateSecurityMatchStr = "YumUpdateSecurityMatch"
|
||||
// NvdRoughVersionMatchStr :
|
||||
NvdRoughVersionMatchStr = "NvdRoughVersionMatch"
|
||||
|
||||
// PkgAuditMatchStr is a String representation of PkgAuditMatch
|
||||
// NvdVendorProductMatchStr :
|
||||
NvdVendorProductMatchStr = "NvdVendorProductMatch"
|
||||
|
||||
// JvnVendorProductMatchStr :
|
||||
JvnVendorProductMatchStr = "JvnVendorProductMatch"
|
||||
|
||||
// PkgAuditMatchStr :
|
||||
PkgAuditMatchStr = "PkgAuditMatch"
|
||||
|
||||
// OvalMatchStr is a String representation of OvalMatch
|
||||
// OvalMatchStr :
|
||||
OvalMatchStr = "OvalMatch"
|
||||
|
||||
// RedHatAPIStr is a String representation of RedHatAPIMatch
|
||||
// RedHatAPIStr is :
|
||||
RedHatAPIStr = "RedHatAPIMatch"
|
||||
|
||||
// DebianSecurityTrackerMatchStr is a String representation of DebianSecurityTrackerMatch
|
||||
// DebianSecurityTrackerMatchStr :
|
||||
DebianSecurityTrackerMatchStr = "DebianSecurityTrackerMatch"
|
||||
|
||||
// TrivyMatchStr is a String representation of Trivy
|
||||
// UbuntuAPIMatchStr :
|
||||
UbuntuAPIMatchStr = "UbuntuAPIMatch"
|
||||
|
||||
// TrivyMatchStr :
|
||||
TrivyMatchStr = "TrivyMatch"
|
||||
|
||||
// ChangelogExactMatchStr is a String representation of ChangelogExactMatch
|
||||
// ChangelogExactMatchStr :
|
||||
ChangelogExactMatchStr = "ChangelogExactMatch"
|
||||
|
||||
// ChangelogLenientMatchStr is a String representation of ChangelogLenientMatch
|
||||
ChangelogLenientMatchStr = "ChangelogLenientMatch"
|
||||
// ChangelogRoughMatchStr :
|
||||
ChangelogRoughMatchStr = "ChangelogRoughMatch"
|
||||
|
||||
// GitHubMatchStr is a String representation of GitHubMatch
|
||||
// GitHubMatchStr :
|
||||
GitHubMatchStr = "GitHubMatch"
|
||||
|
||||
// WpScanMatchStr is a String representation of WordPress VulnDB scanning
|
||||
// WpScanMatchStr :
|
||||
WpScanMatchStr = "WpScanMatch"
|
||||
|
||||
// FailedToGetChangelog is a String representation of FailedToGetChangelog
|
||||
// FailedToGetChangelog :
|
||||
FailedToGetChangelog = "FailedToGetChangelog"
|
||||
|
||||
// FailedToFindVersionInChangelog is a String representation of FailedToFindVersionInChangelog
|
||||
// FailedToFindVersionInChangelog :
|
||||
FailedToFindVersionInChangelog = "FailedToFindVersionInChangelog"
|
||||
)
|
||||
|
||||
var (
|
||||
// CpeNameMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
CpeNameMatch = Confidence{100, CpeNameMatchStr, 1}
|
||||
|
||||
// YumUpdateSecurityMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
YumUpdateSecurityMatch = Confidence{100, YumUpdateSecurityMatchStr, 2}
|
||||
|
||||
// PkgAuditMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
PkgAuditMatch = Confidence{100, PkgAuditMatchStr, 2}
|
||||
|
||||
@@ -867,18 +938,33 @@ var (
|
||||
// DebianSecurityTrackerMatch ranking how confident the CVE-ID was detected correctly
|
||||
DebianSecurityTrackerMatch = Confidence{100, DebianSecurityTrackerMatchStr, 0}
|
||||
|
||||
// UbuntuAPIMatch ranking how confident the CVE-ID was detected correctly
|
||||
UbuntuAPIMatch = Confidence{100, UbuntuAPIMatchStr, 0}
|
||||
|
||||
// TrivyMatch ranking how confident the CVE-ID was detected correctly
|
||||
TrivyMatch = Confidence{100, TrivyMatchStr, 0}
|
||||
|
||||
// ChangelogExactMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
ChangelogExactMatch = Confidence{95, ChangelogExactMatchStr, 3}
|
||||
|
||||
// ChangelogLenientMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
ChangelogLenientMatch = Confidence{50, ChangelogLenientMatchStr, 4}
|
||||
// ChangelogRoughMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
ChangelogRoughMatch = Confidence{50, ChangelogRoughMatchStr, 4}
|
||||
|
||||
// GitHubMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
GitHubMatch = Confidence{97, GitHubMatchStr, 2}
|
||||
GitHubMatch = Confidence{100, GitHubMatchStr, 2}
|
||||
|
||||
// WpScanMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
WpScanMatch = Confidence{100, WpScanMatchStr, 0}
|
||||
|
||||
// NvdExactVersionMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
NvdExactVersionMatch = Confidence{100, NvdExactVersionMatchStr, 1}
|
||||
|
||||
// NvdRoughVersionMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
NvdRoughVersionMatch = Confidence{80, NvdRoughVersionMatchStr, 1}
|
||||
|
||||
// NvdVendorProductMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
NvdVendorProductMatch = Confidence{10, NvdVendorProductMatchStr, 9}
|
||||
|
||||
// JvnVendorProductMatch is a ranking how confident the CVE-ID was detected correctly
|
||||
JvnVendorProductMatch = Confidence{10, JvnVendorProductMatchStr, 10}
|
||||
)
|
||||
|
||||
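The rankings above pair a 0-100 score with a detection-method name and a sort order, inferred from literals such as Confidence{100, OvalMatchStr, 0}. Below is a hedged sketch of that shape; the field names used here are assumptions, not the repository's declarations.

```go
package main

import "fmt"

type DetectionMethod string

// Confidence is sketched from literals like Confidence{100, OvalMatchStr, 0};
// the field names (Score, DetectionMethod, SortOrder) are assumptions.
type Confidence struct {
	Score           int             // how confident the CVE-ID was detected correctly (0-100)
	DetectionMethod DetectionMethod // e.g. "OvalMatch", "NvdExactVersionMatch"
	SortOrder       int             // lower values sort earlier when ranking confidences
}

func main() {
	ovalMatch := Confidence{100, "OvalMatch", 0}
	nvdVendorProductMatch := Confidence{10, "NvdVendorProductMatch", 9}
	fmt.Println(ovalMatch.Score > nvdVendorProductMatch.Score) // true
}
```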
@@ -21,19 +21,19 @@ func TestTitles(t *testing.T) {
|
||||
lang: "ja",
|
||||
cont: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Title: "Title1",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Summary: "Summary RedHat",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Summary: "Summary NVD",
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -58,19 +58,19 @@ func TestTitles(t *testing.T) {
|
||||
lang: "en",
|
||||
cont: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Title: "Title1",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Summary: "Summary RedHat",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Summary: "Summary NVD",
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -122,20 +122,20 @@ func TestSummaries(t *testing.T) {
|
||||
lang: "ja",
|
||||
cont: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Title: "Title JVN",
|
||||
Summary: "Summary JVN",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Summary: "Summary RedHat",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Summary: "Summary NVD",
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -160,20 +160,20 @@ func TestSummaries(t *testing.T) {
|
||||
lang: "en",
|
||||
cont: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Title: "Title JVN",
|
||||
Summary: "Summary JVN",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Summary: "Summary RedHat",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Summary: "Summary NVD",
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -220,32 +220,32 @@ func TestCountGroupBySeverity(t *testing.T) {
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss3Score: 6.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss3Score: 2.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0004": {
|
||||
CveID: "CVE-2017-0004",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss3Score: 5.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0005": {
|
||||
@@ -254,10 +254,10 @@ func TestCountGroupBySeverity(t *testing.T) {
|
||||
"CVE-2017-0006": {
|
||||
CveID: "CVE-2017-0005",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss3Score: 10.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -274,32 +274,32 @@ func TestCountGroupBySeverity(t *testing.T) {
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 1.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0003": {
|
||||
CveID: "CVE-2017-0003",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 2.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0004": {
|
||||
CveID: "CVE-2017-0004",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 5.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0005": {
|
||||
@@ -308,10 +308,10 @@ func TestCountGroupBySeverity(t *testing.T) {
|
||||
"CVE-2017-0006": {
|
||||
CveID: "CVE-2017-0005",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 10.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -346,27 +346,27 @@ func TestToSortedSlice(t *testing.T) {
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 6.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 7.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 8.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -374,27 +374,27 @@ func TestToSortedSlice(t *testing.T) {
|
||||
{
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 7.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 8.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 6.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -405,23 +405,23 @@ func TestToSortedSlice(t *testing.T) {
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 6.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
RedHat: {
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -429,23 +429,23 @@ func TestToSortedSlice(t *testing.T) {
|
||||
{
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
RedHat: {
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 6.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -456,19 +456,19 @@ func TestToSortedSlice(t *testing.T) {
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "High",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "Low",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -476,19 +476,19 @@ func TestToSortedSlice(t *testing.T) {
|
||||
{
|
||||
CveID: "CVE-2017-0002",
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "High",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
CveID: "CVE-2017-0001",
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "Low",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -510,31 +510,31 @@ func TestCvss2Scores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.2,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.0,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 8.1,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
Cvss2Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
//v3
|
||||
RedHatAPI: {
|
||||
RedHatAPI: []CveContent{{
|
||||
Type: RedHatAPI,
|
||||
Cvss3Score: 8.1,
|
||||
Cvss3Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
Cvss3Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentCvss{
|
||||
@@ -590,24 +590,24 @@ func TestMaxCvss2Scores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.2,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.0,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 8.1,
|
||||
Cvss2Vector: "AV:N/AC:L/Au:N/C:N/I:N/A:P",
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -650,18 +650,18 @@ func TestCvss3Scores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
RedHat: {
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Severity: "HIGH",
|
||||
Cvss3Score: 8.0,
|
||||
Cvss3Vector: "AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 8.1,
|
||||
Cvss2Vector: "AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L",
|
||||
Cvss2Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentCvss{
|
||||
@@ -680,10 +680,10 @@ func TestCvss3Scores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: []CveContentCvss{
|
||||
@@ -720,12 +720,12 @@ func TestMaxCvss3Scores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
RedHat: {
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Severity: "HIGH",
|
||||
Cvss3Score: 8.0,
|
||||
Cvss3Vector: "AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -768,14 +768,14 @@ func TestMaxCvssScores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Nvd: {
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss3Score: 7.0,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss2Score: 8.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -789,10 +789,10 @@ func TestMaxCvssScores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
RedHat: {
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Score: 8.0,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -807,10 +807,10 @@ func TestMaxCvssScores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -827,15 +827,15 @@ func TestMaxCvssScores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "MEDIUM",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 7.0,
|
||||
Cvss2Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: CveContentCvss{
|
||||
@@ -871,15 +871,15 @@ func TestMaxCvssScores(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Ubuntu: {
|
||||
Ubuntu: []CveContent{{
|
||||
Type: Ubuntu,
|
||||
Cvss3Severity: "MEDIUM",
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 4.0,
|
||||
Cvss2Severity: "MEDIUM",
|
||||
},
|
||||
}},
|
||||
},
|
||||
DistroAdvisories: []DistroAdvisory{
|
||||
{
|
||||
@@ -925,21 +925,21 @@ func TestFormatMaxCvssScore(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.3,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss3Severity: "HIGH",
|
||||
Cvss3Score: 8.0,
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 8.1,
|
||||
// Severity is NOT included in NVD
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: "8.0 HIGH (redhat)",
|
||||
@@ -947,22 +947,22 @@ func TestFormatMaxCvssScore(t *testing.T) {
|
||||
{
|
||||
in: VulnInfo{
|
||||
CveContents: CveContents{
|
||||
Jvn: {
|
||||
Jvn: []CveContent{{
|
||||
Type: Jvn,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.3,
|
||||
},
|
||||
RedHat: {
|
||||
}},
|
||||
RedHat: []CveContent{{
|
||||
Type: RedHat,
|
||||
Cvss2Severity: "HIGH",
|
||||
Cvss2Score: 8.0,
|
||||
Cvss3Severity: "HIGH",
|
||||
Cvss3Score: 9.9,
|
||||
},
|
||||
Nvd: {
|
||||
}},
|
||||
Nvd: []CveContent{{
|
||||
Type: Nvd,
|
||||
Cvss2Score: 8.1,
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
out: "9.9 HIGH (redhat)",
|
||||
@@ -1037,20 +1037,20 @@ func TestAppendIfMissing(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
in: Confidences{
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
arg: CpeNameMatch,
|
||||
arg: NvdExactVersionMatch,
|
||||
out: Confidences{
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
},
|
||||
{
|
||||
in: Confidences{
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
arg: ChangelogExactMatch,
|
||||
out: Confidences{
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
ChangelogExactMatch,
|
||||
},
|
||||
},
|
||||
@@ -1071,21 +1071,21 @@ func TestSortByConfident(t *testing.T) {
|
||||
{
|
||||
in: Confidences{
|
||||
OvalMatch,
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
out: Confidences{
|
||||
OvalMatch,
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
},
|
||||
{
|
||||
in: Confidences{
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
OvalMatch,
|
||||
},
|
||||
out: Confidences{
|
||||
OvalMatch,
|
||||
CpeNameMatch,
|
||||
NvdExactVersionMatch,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1247,10 +1247,11 @@ func TestVulnInfos_FilterByCvssOver(t *testing.T) {
|
||||
over float64
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
nwant int
|
||||
}{
|
||||
{
|
||||
name: "over 7.0",
|
||||
@@ -1296,6 +1297,7 @@ func TestVulnInfos_FilterByCvssOver(t *testing.T) {
|
||||
),
|
||||
},
|
||||
},
|
||||
nwant: 1,
|
||||
want: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
@@ -1404,9 +1406,13 @@ func TestVulnInfos_FilterByCvssOver(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.v.FilterByCvssOver(tt.args.over); !reflect.DeepEqual(got, tt.want) {
|
||||
got, ngot := tt.v.FilterByCvssOver(tt.args.over)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("VulnInfos.FindByCvssOver() = %v, want %v", got, tt.want)
|
||||
}
|
||||
if ngot != tt.nwant {
|
||||
t.Errorf("VulnInfos.FindByCvssOver() = %d, want %d", ngot, tt.nwant)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1416,10 +1422,11 @@ func TestVulnInfos_FilterIgnoreCves(t *testing.T) {
|
||||
ignoreCveIDs []string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
nwant int
|
||||
}{
|
||||
{
|
||||
name: "filter ignored",
|
||||
@@ -1435,6 +1442,7 @@ func TestVulnInfos_FilterIgnoreCves(t *testing.T) {
|
||||
CveID: "CVE-2017-0003",
|
||||
},
|
||||
},
|
||||
nwant: 1,
|
||||
want: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
@@ -1447,9 +1455,13 @@ func TestVulnInfos_FilterIgnoreCves(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.v.FilterIgnoreCves(tt.args.ignoreCveIDs); !reflect.DeepEqual(got, tt.want) {
|
||||
got, ngot := tt.v.FilterIgnoreCves(tt.args.ignoreCveIDs)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("VulnInfos.FindIgnoreCves() = %v, want %v", got, tt.want)
|
||||
}
|
||||
if ngot != tt.nwant {
|
||||
t.Errorf("VulnInfos.FindByCvssOver() = %d, want %d", ngot, tt.nwant)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1459,10 +1471,11 @@ func TestVulnInfos_FilterUnfixed(t *testing.T) {
|
||||
ignoreUnfixed bool
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
nwant int
|
||||
}{
|
||||
{
|
||||
name: "filter ok",
|
||||
@@ -1500,6 +1513,7 @@ func TestVulnInfos_FilterUnfixed(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
nwant: 1,
|
||||
want: VulnInfos{
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
@@ -1528,9 +1542,13 @@ func TestVulnInfos_FilterUnfixed(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.v.FilterUnfixed(tt.args.ignoreUnfixed); !reflect.DeepEqual(got, tt.want) {
|
||||
got, ngot := tt.v.FilterUnfixed(tt.args.ignoreUnfixed)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("VulnInfos.FilterUnfixed() = %v, want %v", got, tt.want)
|
||||
}
|
||||
if ngot != tt.nwant {
|
||||
t.Errorf("VulnInfos.FindByCvssOver() = %d, want %d", ngot, tt.nwant)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1540,10 +1558,11 @@ func TestVulnInfos_FilterIgnorePkgs(t *testing.T) {
|
||||
ignorePkgsRegexps []string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
nwant int
|
||||
}{
|
||||
{
|
||||
name: "filter pkgs 1",
|
||||
@@ -1559,6 +1578,7 @@ func TestVulnInfos_FilterIgnorePkgs(t *testing.T) {
|
||||
CveID: "CVE-2017-0002",
|
||||
},
|
||||
},
|
||||
nwant: 1,
|
||||
want: VulnInfos{
|
||||
"CVE-2017-0002": {
|
||||
CveID: "CVE-2017-0002",
|
||||
@@ -1577,6 +1597,7 @@ func TestVulnInfos_FilterIgnorePkgs(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
nwant: 0,
|
||||
want: VulnInfos{
|
||||
"CVE-2017-0001": {
|
||||
CveID: "CVE-2017-0001",
|
||||
@@ -1599,14 +1620,100 @@ func TestVulnInfos_FilterIgnorePkgs(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
want: VulnInfos{},
|
||||
nwant: 1,
|
||||
want: VulnInfos{},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.v.FilterIgnorePkgs(tt.args.ignorePkgsRegexps); !reflect.DeepEqual(got, tt.want) {
|
||||
got, ngot := tt.v.FilterIgnorePkgs(tt.args.ignorePkgsRegexps)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("VulnInfos.FilterIgnorePkgs() = %v, want %v", got, tt.want)
|
||||
}
|
||||
if ngot != tt.nwant {
|
||||
t.Errorf("VulnInfos.FilterIgnorePkgs() = %d, want %d", ngot, tt.nwant)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestVulnInfos_FilterByConfidenceOver(t *testing.T) {
|
||||
type args struct {
|
||||
over int
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
v VulnInfos
|
||||
args args
|
||||
want VulnInfos
|
||||
nwant int
|
||||
}{
|
||||
{
|
||||
name: "over 0",
|
||||
v: map[string]VulnInfo{
|
||||
"CVE-2021-1111": {
|
||||
CveID: "CVE-2021-1111",
|
||||
Confidences: Confidences{JvnVendorProductMatch},
|
||||
},
|
||||
},
|
||||
args: args{
|
||||
over: 0,
|
||||
},
|
||||
want: map[string]VulnInfo{
|
||||
"CVE-2021-1111": {
|
||||
CveID: "CVE-2021-1111",
|
||||
Confidences: Confidences{JvnVendorProductMatch},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "over 20",
|
||||
v: map[string]VulnInfo{
|
||||
"CVE-2021-1111": {
|
||||
CveID: "CVE-2021-1111",
|
||||
Confidences: Confidences{JvnVendorProductMatch},
|
||||
},
|
||||
},
|
||||
args: args{
|
||||
over: 20,
|
||||
},
|
||||
nwant: 1,
|
||||
want: map[string]VulnInfo{},
|
||||
},
|
||||
{
|
||||
name: "over 100",
|
||||
v: map[string]VulnInfo{
|
||||
"CVE-2021-1111": {
|
||||
CveID: "CVE-2021-1111",
|
||||
Confidences: Confidences{
|
||||
NvdExactVersionMatch,
|
||||
JvnVendorProductMatch,
|
||||
},
|
||||
},
|
||||
},
|
||||
args: args{
|
||||
over: 20,
|
||||
},
|
||||
want: map[string]VulnInfo{
|
||||
"CVE-2021-1111": {
|
||||
CveID: "CVE-2021-1111",
|
||||
Confidences: Confidences{
|
||||
NvdExactVersionMatch,
|
||||
JvnVendorProductMatch,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, ngot := tt.v.FilterByConfidenceOver(tt.args.over)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("VulnInfos.FilterByConfidenceOver() = %v, want %v", got, tt.want)
|
||||
}
|
||||
if ngot != tt.nwant {
|
||||
t.Errorf("VulnInfos.FilterByConfidenceOver() = %d, want %d", ngot, tt.nwant)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
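The filter tests in this file now expect each VulnInfos filter to return the filtered map plus a count, which the test tables call nwant. A minimal sketch of one such filter follows, under the assumption that the count is the number of entries removed.

```go
package main

import "fmt"

type VulnInfo struct {
	CveID        string
	MaxCvssScore float64 // simplified stand-in for the real MaxCvssScore() value
}

type VulnInfos map[string]VulnInfo

// FilterByCvssOver keeps entries at or above the threshold and also
// returns how many entries were filtered out.
func (v VulnInfos) FilterByCvssOver(over float64) (VulnInfos, int) {
	filtered, nFiltered := VulnInfos{}, 0
	for cveID, vinfo := range v {
		if vinfo.MaxCvssScore < over {
			nFiltered++
			continue
		}
		filtered[cveID] = vinfo
	}
	return filtered, nFiltered
}

func main() {
	vs := VulnInfos{
		"CVE-2017-0001": {CveID: "CVE-2017-0001", MaxCvssScore: 8.0},
		"CVE-2017-0002": {CveID: "CVE-2017-0002", MaxCvssScore: 6.0},
	}
	got, n := vs.FilterByCvssOver(7.0)
	fmt.Println(len(got), n) // 1 1
}
```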
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -32,7 +33,7 @@ func (o Alpine) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
driver, err := newOvalDB(o.Cnf, r.Family)
|
||||
driver, err := newOvalDB(o.Cnf)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -53,8 +54,8 @@ func (o Alpine) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
return len(relatedDefs.entries), nil
|
||||
}
|
||||
|
||||
func (o Alpine) update(r *models.ScanResult, defPacks defPacks) {
|
||||
cveID := defPacks.def.Advisory.Cves[0].CveID
|
||||
func (o Alpine) update(r *models.ScanResult, defpacks defPacks) {
|
||||
cveID := defpacks.def.Advisory.Cves[0].CveID
|
||||
vinfo, ok := r.ScannedCves[cveID]
|
||||
if !ok {
|
||||
logging.Log.Debugf("%s is newly detected by OVAL", cveID)
|
||||
@@ -64,7 +65,7 @@ func (o Alpine) update(r *models.ScanResult, defPacks defPacks) {
|
||||
}
|
||||
}
|
||||
|
||||
vinfo.AffectedPackages = defPacks.toPackStatuses()
|
||||
vinfo.AffectedPackages = defpacks.toPackStatuses()
|
||||
vinfo.AffectedPackages.Sort()
|
||||
r.ScannedCves[cveID] = vinfo
|
||||
}
|
||||
|
||||
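The //go:build lines added across the oval package pair the Go 1.17 constraint syntax with the legacy // +build form; both must state the same condition. A tiny runnable file showing the pairing:

```go
//go:build !scanner
// +build !scanner

// This program compiles only when the "scanner" build tag is absent,
// mirroring the paired constraints added across the oval package above.
package main

import "fmt"

func main() {
	fmt.Println("built without the scanner tag")
}
```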
223
oval/debian.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -11,7 +12,7 @@ import (
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
// DebianBase is the base struct of Debian and Ubuntu
|
||||
@@ -19,73 +20,75 @@ type DebianBase struct {
|
||||
Base
|
||||
}
|
||||
|
||||
func (o DebianBase) update(r *models.ScanResult, defPacks defPacks) {
|
||||
ovalContent := *o.convertToModel(&defPacks.def)
|
||||
ovalContent.Type = models.NewCveContentType(o.family)
|
||||
vinfo, ok := r.ScannedCves[defPacks.def.Debian.CveID]
|
||||
if !ok {
|
||||
logging.Log.Debugf("%s is newly detected by OVAL", defPacks.def.Debian.CveID)
|
||||
vinfo = models.VulnInfo{
|
||||
CveID: defPacks.def.Debian.CveID,
|
||||
Confidences: []models.Confidence{models.OvalMatch},
|
||||
CveContents: models.NewCveContents(ovalContent),
|
||||
func (o DebianBase) update(r *models.ScanResult, defpacks defPacks) {
|
||||
for _, cve := range defpacks.def.Advisory.Cves {
|
||||
ovalContent := o.convertToModel(cve.CveID, &defpacks.def)
|
||||
if ovalContent == nil {
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
cveContents := vinfo.CveContents
|
||||
ctype := models.NewCveContentType(o.family)
|
||||
if _, ok := vinfo.CveContents[ctype]; ok {
|
||||
logging.Log.Debugf("%s OVAL will be overwritten",
|
||||
defPacks.def.Debian.CveID)
|
||||
vinfo, ok := r.ScannedCves[cve.CveID]
|
||||
if !ok {
|
||||
logging.Log.Debugf("%s is newly detected by OVAL", cve.CveID)
|
||||
vinfo = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
Confidences: []models.Confidence{models.OvalMatch},
|
||||
CveContents: models.NewCveContents(*ovalContent),
|
||||
}
|
||||
} else {
|
||||
logging.Log.Debugf("%s is also detected by OVAL",
|
||||
defPacks.def.Debian.CveID)
|
||||
cveContents = models.CveContents{}
|
||||
}
|
||||
if r.Family != constant.Raspbian {
|
||||
cveContents := vinfo.CveContents
|
||||
if _, ok := vinfo.CveContents[ovalContent.Type]; ok {
|
||||
logging.Log.Debugf("%s OVAL will be overwritten", cve.CveID)
|
||||
} else {
|
||||
logging.Log.Debugf("%s is also detected by OVAL", cve.CveID)
|
||||
cveContents = models.CveContents{}
|
||||
}
|
||||
vinfo.Confidences.AppendIfMissing(models.OvalMatch)
|
||||
} else {
|
||||
if len(vinfo.Confidences) == 0 {
|
||||
vinfo.Confidences.AppendIfMissing(models.OvalMatch)
|
||||
cveContents[ovalContent.Type] = []models.CveContent{*ovalContent}
|
||||
vinfo.CveContents = cveContents
|
||||
}
|
||||
|
||||
// uniq(vinfo.AffectedPackages[].Name + defPacks.binpkgFixstat(map[string(=package name)]fixStat{}))
|
||||
collectBinpkgFixstat := defPacks{
|
||||
binpkgFixstat: map[string]fixStat{},
|
||||
}
|
||||
for packName, fixStatus := range defpacks.binpkgFixstat {
|
||||
collectBinpkgFixstat.binpkgFixstat[packName] = fixStatus
|
||||
}
|
||||
|
||||
for _, pack := range vinfo.AffectedPackages {
|
||||
collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
|
||||
notFixedYet: pack.NotFixedYet,
|
||||
fixedIn: pack.FixedIn,
|
||||
isSrcPack: false,
|
||||
}
|
||||
}
|
||||
cveContents[ctype] = ovalContent
|
||||
vinfo.CveContents = cveContents
|
||||
}
|
||||
|
||||
// uniq(vinfo.PackNames + defPacks.binpkgStat)
|
||||
for _, pack := range vinfo.AffectedPackages {
|
||||
defPacks.binpkgFixstat[pack.Name] = fixStat{
|
||||
notFixedYet: pack.NotFixedYet,
|
||||
fixedIn: pack.FixedIn,
|
||||
isSrcPack: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Update package status of source packages.
|
||||
// In the case of Debian based Linux, sometimes source package name is defined as affected package in OVAL.
|
||||
// To display binary package name showed in apt-get, need to convert source name to binary name.
|
||||
for binName := range defPacks.binpkgFixstat {
|
||||
if srcPack, ok := r.SrcPackages.FindByBinName(binName); ok {
|
||||
for _, p := range defPacks.def.AffectedPacks {
|
||||
if p.Name == srcPack.Name {
|
||||
defPacks.binpkgFixstat[binName] = fixStat{
|
||||
notFixedYet: p.NotFixedYet,
|
||||
fixedIn: p.Version,
|
||||
isSrcPack: true,
|
||||
srcPackName: srcPack.Name,
|
||||
// Update package status of source packages.
|
||||
// In the case of Debian based Linux, sometimes source package name is defined as affected package in OVAL.
|
||||
// To display binary package name showed in apt-get, need to convert source name to binary name.
|
||||
for binName := range defpacks.binpkgFixstat {
|
||||
if srcPack, ok := r.SrcPackages.FindByBinName(binName); ok {
|
||||
for _, p := range defpacks.def.AffectedPacks {
|
||||
if p.Name == srcPack.Name {
|
||||
collectBinpkgFixstat.binpkgFixstat[binName] = fixStat{
|
||||
notFixedYet: p.NotFixedYet,
|
||||
fixedIn: p.Version,
|
||||
isSrcPack: true,
|
||||
srcPackName: srcPack.Name,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
vinfo.AffectedPackages = defPacks.toPackStatuses()
|
||||
vinfo.AffectedPackages.Sort()
|
||||
r.ScannedCves[defPacks.def.Debian.CveID] = vinfo
|
||||
vinfo.AffectedPackages = collectBinpkgFixstat.toPackStatuses()
|
||||
vinfo.AffectedPackages.Sort()
|
||||
r.ScannedCves[cve.CveID] = vinfo
|
||||
}
|
||||
}
|
||||
|
||||
func (o DebianBase) convertToModel(def *ovalmodels.Definition) *models.CveContent {
|
||||
refs := []models.Reference{}
|
||||
func (o DebianBase) convertToModel(cveID string, def *ovalmodels.Definition) *models.CveContent {
|
||||
refs := make([]models.Reference, 0, len(def.References))
|
||||
for _, r := range def.References {
|
||||
refs = append(refs, models.Reference{
|
||||
Link: r.RefURL,
|
||||
@@ -94,14 +97,23 @@ func (o DebianBase) convertToModel(def *ovalmodels.Definition) *models.CveConten
|
||||
})
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
CveID: def.Debian.CveID,
|
||||
Title: def.Title,
|
||||
Summary: def.Description,
|
||||
Cvss2Severity: def.Advisory.Severity,
|
||||
Cvss3Severity: def.Advisory.Severity,
|
||||
References: refs,
|
||||
for _, cve := range def.Advisory.Cves {
|
||||
if cve.CveID != cveID {
|
||||
continue
|
||||
}
|
||||
|
||||
return &models.CveContent{
|
||||
Type: models.NewCveContentType(o.family),
|
||||
CveID: cve.CveID,
|
||||
Title: def.Title,
|
||||
Summary: def.Description,
|
||||
Cvss2Severity: def.Advisory.Severity,
|
||||
Cvss3Severity: def.Advisory.Severity,
|
||||
References: refs,
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
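convertToModel now receives a CVE ID and returns nil when the OVAL definition, which can carry several CVEs, does not mention it. A simplified, self-contained sketch of that selection, with types reduced to what it needs:

```go
package main

import "fmt"

type Cve struct{ CveID string }

type Definition struct {
	Title string
	Cves  []Cve
}

type CveContent struct {
	CveID string
	Title string
}

// convertToModel returns content only for the CVE that matches cveID,
// or nil when the definition does not mention that CVE.
func convertToModel(cveID string, def *Definition) *CveContent {
	for _, cve := range def.Cves {
		if cve.CveID != cveID {
			continue
		}
		return &CveContent{CveID: cve.CveID, Title: def.Title}
	}
	return nil
}

func main() {
	def := Definition{Title: "DSA-0000", Cves: []Cve{{CveID: "CVE-2000-1000"}, {CveID: "CVE-2000-1001"}}}
	fmt.Println(convertToModel("CVE-2000-1001", &def)) // &{CVE-2000-1001 DSA-0000}
}
```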
// Debian is the interface for Debian OVAL
|
||||
@@ -142,19 +154,11 @@ func (o Debian) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
|
||||
var relatedDefs ovalResult
|
||||
if o.Cnf.IsFetchViaHTTP() {
|
||||
if r.Family != constant.Raspbian {
|
||||
if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
// OVAL does not support Package for Raspbian, so skip it.
|
||||
result := r.RemoveRaspbianPackFromResult()
|
||||
if relatedDefs, err = getDefsByPackNameViaHTTP(&result, o.Cnf.GetURL()); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if relatedDefs, err = getDefsByPackNameViaHTTP(r, o.Cnf.GetURL()); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
driver, err := newOvalDB(o.Cnf, r.Family)
|
||||
driver, err := newOvalDB(o.Cnf)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -164,16 +168,8 @@ func (o Debian) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
}
|
||||
}()
|
||||
|
||||
if r.Family != constant.Raspbian {
|
||||
if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
// OVAL does not support Package for Raspbian, so skip it.
|
||||
result := r.RemoveRaspbianPackFromResult()
|
||||
if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, &result); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if relatedDefs, err = getDefsByPackNameFromOvalDB(driver, r); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,9 +193,11 @@ func (o Debian) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
}
|
||||
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if cont, ok := vuln.CveContents[models.Debian]; ok {
|
||||
cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
|
||||
vuln.CveContents[models.Debian] = cont
|
||||
if conts, ok := vuln.CveContents[models.Debian]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
|
||||
vuln.CveContents[models.Debian][i] = cont
|
||||
}
|
||||
}
|
||||
}
|
||||
return len(relatedDefs.entries), nil
|
||||
@@ -358,6 +356,47 @@ func (o Ubuntu) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
"linux",
|
||||
}
|
||||
return o.fillWithOval(r, kernelNamesInOval)
|
||||
case "21":
|
||||
kernelNamesInOval := []string{
|
||||
"linux-aws",
|
||||
"linux-base-sgx",
|
||||
"linux-base",
|
||||
"linux-cloud-tools-common",
|
||||
"linux-cloud-tools-generic",
|
||||
"linux-cloud-tools-lowlatency",
|
||||
"linux-cloud-tools-virtual",
|
||||
"linux-gcp",
|
||||
"linux-generic",
|
||||
"linux-gke",
|
||||
"linux-headers-aws",
|
||||
"linux-headers-gcp",
|
||||
"linux-headers-gke",
|
||||
"linux-headers-oracle",
|
||||
"linux-image-aws",
|
||||
"linux-image-extra-virtual",
|
||||
"linux-image-gcp",
|
||||
"linux-image-generic",
|
||||
"linux-image-gke",
|
||||
"linux-image-lowlatency",
|
||||
"linux-image-oracle",
|
||||
"linux-image-virtual",
|
||||
"linux-lowlatency",
|
||||
"linux-modules-extra-aws",
|
||||
"linux-modules-extra-gcp",
|
||||
"linux-modules-extra-gke",
|
||||
"linux-oracle",
|
||||
"linux-tools-aws",
|
||||
"linux-tools-common",
|
||||
"linux-tools-gcp",
|
||||
"linux-tools-generic",
|
||||
"linux-tools-gke",
|
||||
"linux-tools-host",
|
||||
"linux-tools-lowlatency",
|
||||
"linux-tools-oracle",
|
||||
"linux-tools-virtual",
|
||||
"linux-virtual",
|
||||
}
|
||||
return o.fillWithOval(r, kernelNamesInOval)
|
||||
}
|
||||
return 0, fmt.Errorf("Ubuntu %s is not support for now", r.Release)
|
||||
}
|
||||
@@ -433,13 +472,13 @@ func (o Ubuntu) fillWithOval(r *models.ScanResult, kernelNamesInOval []string) (
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
driver, err := newOvalDB(o.Cnf, r.Family)
|
||||
driver, err := newOvalDB(o.Cnf)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -473,9 +512,11 @@ func (o Ubuntu) fillWithOval(r *models.ScanResult, kernelNamesInOval []string) (
|
||||
}
|
||||
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if cont, ok := vuln.CveContents[models.Ubuntu]; ok {
|
||||
cont.SourceLink = "http://people.ubuntu.com/~ubuntu-security/cve/" + cont.CveID
|
||||
vuln.CveContents[models.Ubuntu] = cont
|
||||
if conts, ok := vuln.CveContents[models.Ubuntu]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = "http://people.ubuntu.com/~ubuntu-security/cve/" + cont.CveID
|
||||
vuln.CveContents[models.Ubuntu][i] = cont
|
||||
}
|
||||
}
|
||||
}
|
||||
return len(relatedDefs.entries), nil
|
||||
|
||||
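The SourceLink loops above assign back through the index because ranging over a []CveContent yields copies. A short demonstration of that write-back idiom:

```go
package main

import "fmt"

type CveContent struct {
	CveID      string
	SourceLink string
}

func main() {
	conts := []CveContent{{CveID: "CVE-2021-0001"}}
	for i, cont := range conts {
		cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
		conts[i] = cont // without this write-back, the slice element stays unchanged
	}
	fmt.Println(conts[0].SourceLink)
}
```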
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -7,7 +8,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
func TestPackNamesOfUpdateDebian(t *testing.T) {
|
||||
@@ -29,8 +30,8 @@ func TestPackNamesOfUpdateDebian(t *testing.T) {
|
||||
},
|
||||
defPacks: defPacks{
|
||||
def: ovalmodels.Definition{
|
||||
Debian: ovalmodels.Debian{
|
||||
CveID: "CVE-2000-1000",
|
||||
Advisory: ovalmodels.Advisory{
|
||||
Cves: []ovalmodels.Cve{{CveID: "CVE-2000-1000"}},
|
||||
},
|
||||
},
|
||||
binpkgFixstat: map[string]fixStat{
|
||||
@@ -52,15 +53,68 @@ func TestPackNamesOfUpdateDebian(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: models.ScanResult{
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2000-1000": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packA"},
|
||||
},
|
||||
},
|
||||
"CVE-2000-1001": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packC"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
defPacks: defPacks{
|
||||
def: ovalmodels.Definition{
|
||||
Advisory: ovalmodels.Advisory{
|
||||
Cves: []ovalmodels.Cve{
|
||||
{
|
||||
CveID: "CVE-2000-1000",
|
||||
},
|
||||
{
|
||||
CveID: "CVE-2000-1001",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
binpkgFixstat: map[string]fixStat{
|
||||
"packB": {
|
||||
notFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
out: models.ScanResult{
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2000-1000": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packA"},
|
||||
{Name: "packB", NotFixedYet: false},
|
||||
},
|
||||
},
|
||||
"CVE-2000-1001": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packB", NotFixedYet: false},
|
||||
{Name: "packC"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// util.Log = util.NewCustomLogger()
|
||||
for i, tt := range tests {
|
||||
Debian{}.update(&tt.in, tt.defPacks)
|
||||
e := tt.out.ScannedCves["CVE-2000-1000"].AffectedPackages
|
||||
a := tt.in.ScannedCves["CVE-2000-1000"].AffectedPackages
|
||||
if !reflect.DeepEqual(a, e) {
|
||||
t.Errorf("[%d] expected: %#v\n actual: %#v\n", i, e, a)
|
||||
for cveid := range tt.out.ScannedCves {
|
||||
e := tt.out.ScannedCves[cveid].AffectedPackages
|
||||
a := tt.in.ScannedCves[cveid].AffectedPackages
|
||||
if !reflect.DeepEqual(a, e) {
|
||||
t.Errorf("[%d] expected: %v\n actual: %v\n", i, e, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
package oval
|
||||
57
oval/oval.go
@@ -1,17 +1,20 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"github.com/kotakanbe/goval-dictionary/db"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"github.com/vulsio/goval-dictionary/db"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
@@ -32,28 +35,32 @@ type Base struct {
|
||||
func (b Base) CheckIfOvalFetched(osFamily, release string) (fetched bool, err error) {
|
||||
ovalFamily, err := GetFamilyInOval(osFamily)
|
||||
if err != nil {
|
||||
return false, err
|
||||
return false, xerrors.Errorf("Failed to GetFamilyInOval. err: %w", err)
|
||||
}
|
||||
ovalRelease := release
|
||||
if osFamily == constant.CentOS {
|
||||
ovalRelease = strings.TrimPrefix(release, "stream")
|
||||
}
|
||||
if !b.Cnf.IsFetchViaHTTP() {
|
||||
driver, err := newOvalDB(b.Cnf, ovalFamily)
|
||||
driver, err := newOvalDB(b.Cnf)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
count, err := driver.CountDefs(ovalFamily, release)
|
||||
count, err := driver.CountDefs(ovalFamily, ovalRelease)
|
||||
if err != nil {
|
||||
return false, xerrors.Errorf("Failed to count OVAL defs: %s, %s, %w", ovalFamily, release, err)
|
||||
return false, xerrors.Errorf("Failed to count OVAL defs: %s, %s, %w", ovalFamily, ovalRelease, err)
|
||||
}
|
||||
logging.Log.Infof("OVAL %s %s found. defs: %d", osFamily, release, count)
|
||||
logging.Log.Infof("OVAL %s %s found. defs: %d", ovalFamily, ovalRelease, count)
|
||||
return 0 < count, nil
|
||||
}
|
||||
|
||||
url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "count", ovalFamily, release)
|
||||
url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "count", ovalFamily, ovalRelease)
|
||||
resp, body, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return false, xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
@@ -62,7 +69,7 @@ func (b Base) CheckIfOvalFetched(osFamily, release string) (fetched bool, err er
|
||||
if err := json.Unmarshal([]byte(body), &count); err != nil {
|
||||
return false, xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
|
||||
}
|
||||
logging.Log.Infof("OVAL %s %s is fresh. defs: %d", osFamily, release, count)
|
||||
logging.Log.Infof("OVAL %s %s found. defs: %d", ovalFamily, ovalRelease, count)
|
||||
return 0 < count, nil
|
||||
}
|
||||
|
||||
@@ -70,22 +77,29 @@ func (b Base) CheckIfOvalFetched(osFamily, release string) (fetched bool, err er
|
||||
func (b Base) CheckIfOvalFresh(osFamily, release string) (ok bool, err error) {
|
||||
ovalFamily, err := GetFamilyInOval(osFamily)
|
||||
if err != nil {
|
||||
return false, err
|
||||
return false, xerrors.Errorf("Failed to GetFamilyInOval. err: %w", err)
|
||||
}
|
||||
ovalRelease := release
|
||||
if osFamily == constant.CentOS {
|
||||
ovalRelease = strings.TrimPrefix(release, "stream")
|
||||
}
|
||||
var lastModified time.Time
|
||||
if !b.Cnf.IsFetchViaHTTP() {
|
||||
driver, err := newOvalDB(b.Cnf, ovalFamily)
|
||||
driver, err := newOvalDB(b.Cnf)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
lastModified = driver.GetLastModified(ovalFamily, release)
|
||||
lastModified, err = driver.GetLastModified(ovalFamily, ovalRelease)
|
||||
if err != nil {
|
||||
return false, xerrors.Errorf("Failed to GetLastModified: %w", err)
|
||||
}
|
||||
} else {
|
||||
url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "lastmodified", ovalFamily, release)
|
||||
url, _ := util.URLPathJoin(config.Conf.OvalDict.URL, "lastmodified", ovalFamily, ovalRelease)
|
||||
resp, body, errs := gorequest.New().Timeout(10 * time.Second).Get(url).End()
|
||||
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
|
||||
return false, xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
|
||||
@@ -99,16 +113,16 @@ func (b Base) CheckIfOvalFresh(osFamily, release string) (ok bool, err error) {
|
||||
since := time.Now()
|
||||
since = since.AddDate(0, 0, -3)
|
||||
if lastModified.Before(since) {
|
||||
logging.Log.Warnf("OVAL for %s %s is old, last modified is %s. It's recommended to update OVAL to improve scanning accuracy. How to update OVAL database, see https://github.com/kotakanbe/goval-dictionary#usage",
|
||||
osFamily, release, lastModified)
|
||||
logging.Log.Warnf("OVAL for %s %s is old, last modified is %s. It's recommended to update OVAL to improve scanning accuracy. How to update OVAL database, see https://github.com/vulsio/goval-dictionary#usage",
|
||||
ovalFamily, ovalRelease, lastModified)
|
||||
return false, nil
|
||||
}
|
||||
logging.Log.Infof("OVAL %s %s is fresh. lastModified: %s", osFamily, release, lastModified.Format(time.RFC3339))
|
||||
logging.Log.Infof("OVAL %s %s is fresh. lastModified: %s", ovalFamily, ovalRelease, lastModified.Format(time.RFC3339))
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// NewOvalDB returns oval db client
|
||||
func newOvalDB(cnf config.VulnDictInterface, familyInScanResult string) (driver db.DB, err error) {
|
||||
func newOvalDB(cnf config.VulnDictInterface) (driver db.DB, err error) {
|
||||
if cnf.IsFetchViaHTTP() {
|
||||
return nil, nil
|
||||
}
|
||||
@@ -118,12 +132,7 @@ func newOvalDB(cnf config.VulnDictInterface, familyInScanResult string) (driver
|
||||
path = cnf.GetSQLite3Path()
|
||||
}
|
||||
|
||||
ovalFamily, err := GetFamilyInOval(familyInScanResult)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
driver, locked, err := db.NewDB(ovalFamily, cnf.GetType(), path, cnf.GetDebugSQL())
|
||||
driver, locked, err := db.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), db.Option{})
|
||||
if err != nil {
|
||||
if locked {
|
||||
err = xerrors.Errorf("SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
|
||||
|
||||
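The oval.go hunks above normalise the release reported by CentOS Stream (for example "stream8") before counting OVAL definitions or checking freshness. A minimal sketch of that normalisation; the family string is simplified to a literal here.

```go
package main

import (
	"fmt"
	"strings"
)

// ovalRelease trims the "stream" prefix CentOS Stream reports so the
// OVAL database is queried with the plain major version.
func ovalRelease(osFamily, release string) string {
	if osFamily == "centos" { // stand-in for constant.CentOS
		return strings.TrimPrefix(release, "stream")
	}
	return release
}

func main() {
	fmt.Println(ovalRelease("centos", "stream8")) // 8
	fmt.Println(ovalRelease("ubuntu", "20.04"))   // 20.04
}
```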
179
oval/redhat.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -11,10 +12,10 @@ import (
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
// RedHatBase is the base struct for RedHat and CentOS
|
||||
// RedHatBase is the base struct for RedHat, CentOS, Alma, Rocky and Fedora
|
||||
type RedHatBase struct {
|
||||
Base
|
||||
}
|
||||
@@ -27,13 +28,13 @@ func (o RedHatBase) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
driver, err := newOvalDB(o.Cnf, r.Family)
|
||||
driver, err := newOvalDB(o.Cnf)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -50,14 +51,42 @@ func (o RedHatBase) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
for _, vuln := range r.ScannedCves {
|
||||
switch models.NewCveContentType(o.family) {
|
||||
case models.RedHat:
|
||||
if cont, ok := vuln.CveContents[models.RedHat]; ok {
|
||||
cont.SourceLink = "https://access.redhat.com/security/cve/" + cont.CveID
|
||||
vuln.CveContents[models.RedHat] = cont
|
||||
if conts, ok := vuln.CveContents[models.RedHat]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = "https://access.redhat.com/security/cve/" + cont.CveID
|
||||
vuln.CveContents[models.RedHat][i] = cont
|
||||
}
|
||||
}
|
||||
case models.Fedora:
|
||||
for _, d := range vuln.DistroAdvisories {
|
||||
if conts, ok := vuln.CveContents[models.Fedora]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = "https://bodhi.fedoraproject.org/updates/" + d.AdvisoryID
|
||||
vuln.CveContents[models.Fedora][i] = cont
|
||||
}
|
||||
}
|
||||
}
|
||||
case models.Oracle:
|
||||
if cont, ok := vuln.CveContents[models.Oracle]; ok {
|
||||
cont.SourceLink = fmt.Sprintf("https://linux.oracle.com/cve/%s.html", cont.CveID)
|
||||
vuln.CveContents[models.Oracle] = cont
|
||||
if conts, ok := vuln.CveContents[models.Oracle]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = fmt.Sprintf("https://linux.oracle.com/cve/%s.html", cont.CveID)
|
||||
vuln.CveContents[models.Oracle][i] = cont
|
||||
}
|
||||
}
|
||||
case models.Amazon:
|
||||
for _, d := range vuln.DistroAdvisories {
|
||||
if conts, ok := vuln.CveContents[models.Amazon]; ok {
|
||||
for i, cont := range conts {
|
||||
if strings.HasPrefix(d.AdvisoryID, "ALAS2022-") {
|
||||
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2022", "ALAS"))
|
||||
} else if strings.HasPrefix(d.AdvisoryID, "ALAS2-") {
|
||||
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(d.AdvisoryID, "ALAS2", "ALAS"))
|
||||
} else if strings.HasPrefix(d.AdvisoryID, "ALAS-") {
|
||||
cont.SourceLink = fmt.Sprintf("https://alas.aws.amazon.com/%s.html", d.AdvisoryID)
|
||||
}
|
||||
vuln.CveContents[models.Amazon][i] = cont
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
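The Amazon branch above maps an ALAS advisory ID to its web page depending on the Amazon Linux generation. Below is a standalone version of that mapping; the behaviour is copied from the hunk, only the packaging is new.

```go
package main

import (
	"fmt"
	"strings"
)

// alasSourceLink builds the advisory URL for Amazon Linux 2022, 2 and 1
// based on the advisory ID prefix, as in the diff above.
func alasSourceLink(advisoryID string) string {
	switch {
	case strings.HasPrefix(advisoryID, "ALAS2022-"):
		return fmt.Sprintf("https://alas.aws.amazon.com/AL2022/%s.html", strings.ReplaceAll(advisoryID, "ALAS2022", "ALAS"))
	case strings.HasPrefix(advisoryID, "ALAS2-"):
		return fmt.Sprintf("https://alas.aws.amazon.com/AL2/%s.html", strings.ReplaceAll(advisoryID, "ALAS2", "ALAS"))
	case strings.HasPrefix(advisoryID, "ALAS-"):
		return fmt.Sprintf("https://alas.aws.amazon.com/%s.html", advisoryID)
	}
	return ""
}

func main() {
	fmt.Println(alasSourceLink("ALAS2-2021-1234"))
	// https://alas.aws.amazon.com/AL2/ALAS-2021-1234.html
}
```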
@@ -92,59 +121,71 @@ var kernelRelatedPackNames = map[string]bool{
|
||||
"kernel-tools": true,
|
||||
"kernel-tools-libs": true,
|
||||
"kernel-tools-libs-devel": true,
|
||||
"kernel-uek": true,
|
||||
"perf": true,
|
||||
"python-perf": true,
|
||||
}
|
||||
|
||||
func (o RedHatBase) update(r *models.ScanResult, defPacks defPacks) (nCVEs int) {
|
||||
ctype := models.NewCveContentType(o.family)
|
||||
for _, cve := range defPacks.def.Advisory.Cves {
|
||||
ovalContent := *o.convertToModel(cve.CveID, &defPacks.def)
|
||||
func (o RedHatBase) update(r *models.ScanResult, defpacks defPacks) (nCVEs int) {
|
||||
for _, cve := range defpacks.def.Advisory.Cves {
|
||||
ovalContent := o.convertToModel(cve.CveID, &defpacks.def)
|
||||
if ovalContent == nil {
|
||||
continue
|
||||
}
|
||||
vinfo, ok := r.ScannedCves[cve.CveID]
|
||||
if !ok {
|
||||
logging.Log.Debugf("%s is newly detected by OVAL: DefID: %s", cve.CveID, defPacks.def.DefinitionID)
|
||||
logging.Log.Debugf("%s is newly detected by OVAL: DefID: %s", cve.CveID, defpacks.def.DefinitionID)
|
||||
vinfo = models.VulnInfo{
|
||||
CveID: cve.CveID,
|
||||
Confidences: models.Confidences{models.OvalMatch},
|
||||
CveContents: models.NewCveContents(ovalContent),
|
||||
CveContents: models.NewCveContents(*ovalContent),
|
||||
}
|
||||
nCVEs++
|
||||
} else {
|
||||
cveContents := vinfo.CveContents
|
||||
if v, ok := vinfo.CveContents[ctype]; ok {
|
||||
if v.LastModified.After(ovalContent.LastModified) {
|
||||
logging.Log.Debugf("%s ignored. DefID: %s ", cve.CveID, defPacks.def.DefinitionID)
|
||||
} else {
|
||||
logging.Log.Debugf("%s OVAL will be overwritten. DefID: %s", cve.CveID, defPacks.def.DefinitionID)
|
||||
if v, ok := vinfo.CveContents[ovalContent.Type]; ok {
|
||||
for _, vv := range v {
|
||||
if vv.LastModified.After(ovalContent.LastModified) {
|
||||
logging.Log.Debugf("%s ignored. DefID: %s ", cve.CveID, defpacks.def.DefinitionID)
|
||||
} else {
|
||||
logging.Log.Debugf("%s OVAL will be overwritten. DefID: %s", cve.CveID, defpacks.def.DefinitionID)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logging.Log.Debugf("%s also detected by OVAL. DefID: %s", cve.CveID, defPacks.def.DefinitionID)
|
||||
logging.Log.Debugf("%s also detected by OVAL. DefID: %s", cve.CveID, defpacks.def.DefinitionID)
|
||||
cveContents = models.CveContents{}
|
||||
}
|
||||
|
||||
vinfo.Confidences.AppendIfMissing(models.OvalMatch)
|
||||
cveContents[ctype] = ovalContent
|
||||
cveContents[ovalContent.Type] = []models.CveContent{*ovalContent}
|
||||
vinfo.CveContents = cveContents
|
||||
}
|
||||
|
||||
vinfo.DistroAdvisories.AppendIfMissing(
|
||||
o.convertToDistroAdvisory(&defPacks.def))
|
||||
o.convertToDistroAdvisory(&defpacks.def))
|
||||
|
||||
// uniq(vinfo.AffectedPackages[].Name + defPacks.binpkgFixstat(map[string(=package name)]fixStat{}))
|
||||
collectBinpkgFixstat := defPacks{
|
||||
binpkgFixstat: map[string]fixStat{},
|
||||
}
|
||||
for packName, fixStatus := range defpacks.binpkgFixstat {
|
||||
collectBinpkgFixstat.binpkgFixstat[packName] = fixStatus
|
||||
}
|
||||
|
||||
// uniq(vinfo.PackNames + defPacks.actuallyAffectedPackNames)
|
||||
for _, pack := range vinfo.AffectedPackages {
|
||||
if stat, ok := defPacks.binpkgFixstat[pack.Name]; !ok {
|
||||
defPacks.binpkgFixstat[pack.Name] = fixStat{
|
||||
if stat, ok := collectBinpkgFixstat.binpkgFixstat[pack.Name]; !ok {
|
||||
collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
|
||||
notFixedYet: pack.NotFixedYet,
|
||||
fixedIn: pack.FixedIn,
|
||||
}
|
||||
} else if stat.notFixedYet {
|
||||
defPacks.binpkgFixstat[pack.Name] = fixStat{
|
||||
collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
|
||||
notFixedYet: true,
|
||||
fixedIn: pack.FixedIn,
|
||||
}
|
||||
}
|
||||
}
|
||||
vinfo.AffectedPackages = defPacks.toPackStatuses()
|
||||
vinfo.AffectedPackages = collectBinpkgFixstat.toPackStatuses()
|
||||
vinfo.AffectedPackages.Sort()
|
||||
r.ScannedCves[cve.CveID] = vinfo
|
||||
}
|
||||
@@ -153,9 +194,12 @@ func (o RedHatBase) update(r *models.ScanResult, defPacks defPacks) (nCVEs int)
|
||||
|
||||
func (o RedHatBase) convertToDistroAdvisory(def *ovalmodels.Definition) *models.DistroAdvisory {
|
||||
advisoryID := def.Title
|
||||
if (o.family == constant.RedHat || o.family == constant.CentOS) && len(advisoryID) > 0 {
|
||||
ss := strings.Fields(def.Title)
|
||||
advisoryID = strings.TrimSuffix(ss[0], ":")
|
||||
switch o.family {
|
||||
case constant.RedHat, constant.CentOS, constant.Alma, constant.Rocky, constant.Oracle:
|
||||
if def.Title != "" {
|
||||
ss := strings.Fields(def.Title)
|
||||
advisoryID = strings.TrimSuffix(ss[0], ":")
|
||||
}
|
||||
}
|
||||
return &models.DistroAdvisory{
|
||||
AdvisoryID: advisoryID,
|
||||
@@ -167,18 +211,19 @@ func (o RedHatBase) convertToDistroAdvisory(def *ovalmodels.Definition) *models.
|
||||
}
|
||||
|
||||
func (o RedHatBase) convertToModel(cveID string, def *ovalmodels.Definition) *models.CveContent {
|
||||
refs := make([]models.Reference, 0, len(def.References))
|
||||
for _, r := range def.References {
|
||||
refs = append(refs, models.Reference{
|
||||
Link: r.RefURL,
|
||||
Source: r.Source,
|
||||
RefID: r.RefID,
|
||||
})
|
||||
}
|
||||
|
||||
for _, cve := range def.Advisory.Cves {
|
||||
if cve.CveID != cveID {
|
||||
continue
|
||||
}
|
||||
var refs []models.Reference
|
||||
for _, r := range def.References {
|
||||
refs = append(refs, models.Reference{
|
||||
Link: r.RefURL,
|
||||
Source: r.Source,
|
||||
RefID: r.RefID,
|
||||
})
|
||||
}
|
||||
|
||||
score2, vec2 := o.parseCvss2(cve.Cvss2)
|
||||
score3, vec3 := o.parseCvss3(cve.Cvss3)
|
||||
@@ -318,3 +363,57 @@ func NewAmazon(cnf config.VulnDictInterface) Amazon {
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Alma is the interface for RedhatBase OVAL
|
||||
type Alma struct {
|
||||
// Base
|
||||
RedHatBase
|
||||
}
|
||||
|
||||
// NewAlma creates OVAL client for Alma Linux
|
||||
func NewAlma(cnf config.VulnDictInterface) Alma {
|
||||
return Alma{
|
||||
RedHatBase{
|
||||
Base{
|
||||
family: constant.Alma,
|
||||
Cnf: cnf,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Rocky is the interface for RedhatBase OVAL
|
||||
type Rocky struct {
|
||||
// Base
|
||||
RedHatBase
|
||||
}
|
||||
|
||||
// NewRocky creates OVAL client for Rocky Linux
|
||||
func NewRocky(cnf config.VulnDictInterface) Rocky {
|
||||
return Rocky{
|
||||
RedHatBase{
|
||||
Base{
|
||||
family: constant.Rocky,
|
||||
Cnf: cnf,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Fedora is the interface for RedhatBase OVAL
|
||||
type Fedora struct {
|
||||
// Base
|
||||
RedHatBase
|
||||
}
|
||||
|
||||
// NewFedora creates OVAL client for Fedora Linux
|
||||
func NewFedora(cnf config.VulnDictInterface) Fedora {
|
||||
return Fedora{
|
||||
RedHatBase{
|
||||
Base{
|
||||
family: constant.Fedora,
|
||||
Cnf: cnf,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -7,7 +8,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/future-architect/vuls/models"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
func TestParseCvss2(t *testing.T) {
|
||||
@@ -128,15 +129,68 @@ func TestPackNamesOfUpdate(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
in: models.ScanResult{
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2000-1000": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packA"},
|
||||
},
|
||||
},
|
||||
"CVE-2000-1001": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packC"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
defPacks: defPacks{
|
||||
def: ovalmodels.Definition{
|
||||
Advisory: ovalmodels.Advisory{
|
||||
Cves: []ovalmodels.Cve{
|
||||
{
|
||||
CveID: "CVE-2000-1000",
|
||||
},
|
||||
{
|
||||
CveID: "CVE-2000-1001",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
binpkgFixstat: map[string]fixStat{
|
||||
"packB": {
|
||||
notFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
out: models.ScanResult{
|
||||
ScannedCves: models.VulnInfos{
|
||||
"CVE-2000-1000": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packA"},
|
||||
{Name: "packB", NotFixedYet: false},
|
||||
},
|
||||
},
|
||||
"CVE-2000-1001": models.VulnInfo{
|
||||
AffectedPackages: models.PackageFixStatuses{
|
||||
{Name: "packB", NotFixedYet: false},
|
||||
{Name: "packC"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// util.Log = util.Logger{}.NewCustomLogger()
|
||||
for i, tt := range tests {
|
||||
RedHat{}.update(&tt.in, tt.defPacks)
|
||||
e := tt.out.ScannedCves["CVE-2000-1000"].AffectedPackages
|
||||
a := tt.in.ScannedCves["CVE-2000-1000"].AffectedPackages
|
||||
if !reflect.DeepEqual(a, e) {
|
||||
t.Errorf("[%d] expected: %v\n actual: %v\n", i, e, a)
|
||||
for cveid := range tt.out.ScannedCves {
|
||||
e := tt.out.ScannedCves[cveid].AffectedPackages
|
||||
a := tt.in.ScannedCves[cveid].AffectedPackages
|
||||
if !reflect.DeepEqual(a, e) {
|
||||
t.Errorf("[%d] expected: %v\n actual: %v\n", i, e, a)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
46  oval/suse.go
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -7,7 +8,7 @@ import (
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
// SUSE is the struct of SUSE Linux
|
||||
@@ -34,13 +35,13 @@ func (o SUSE) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
return 0, err
|
||||
}
|
||||
} else {
|
||||
driver, err := newOvalDB(o.Cnf, r.Family)
|
||||
driver, err := newOvalDB(o.Cnf)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer func() {
|
||||
if err := driver.CloseDB(); err != nil {
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v")
|
||||
logging.Log.Errorf("Failed to close DB. err: %+v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -53,22 +54,24 @@ func (o SUSE) FillWithOval(r *models.ScanResult) (nCVEs int, err error) {
|
||||
}
|
||||
|
||||
for _, vuln := range r.ScannedCves {
|
||||
if cont, ok := vuln.CveContents[models.SUSE]; ok {
|
||||
cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
|
||||
vuln.CveContents[models.SUSE] = cont
|
||||
if conts, ok := vuln.CveContents[models.SUSE]; ok {
|
||||
for i, cont := range conts {
|
||||
cont.SourceLink = "https://security-tracker.debian.org/tracker/" + cont.CveID
|
||||
vuln.CveContents[models.SUSE][i] = cont
|
||||
}
|
||||
}
|
||||
}
|
||||
return len(relatedDefs.entries), nil
|
||||
}
|
||||
|
||||
func (o SUSE) update(r *models.ScanResult, defPacks defPacks) {
|
||||
ovalContent := *o.convertToModel(&defPacks.def)
|
||||
func (o SUSE) update(r *models.ScanResult, defpacks defPacks) {
|
||||
ovalContent := *o.convertToModel(&defpacks.def)
|
||||
ovalContent.Type = models.NewCveContentType(o.family)
|
||||
vinfo, ok := r.ScannedCves[defPacks.def.Title]
|
||||
vinfo, ok := r.ScannedCves[defpacks.def.Title]
|
||||
if !ok {
|
||||
logging.Log.Debugf("%s is newly detected by OVAL", defPacks.def.Title)
|
||||
logging.Log.Debugf("%s is newly detected by OVAL", defpacks.def.Title)
|
||||
vinfo = models.VulnInfo{
|
||||
CveID: defPacks.def.Title,
|
||||
CveID: defpacks.def.Title,
|
||||
Confidences: models.Confidences{models.OvalMatch},
|
||||
CveContents: models.NewCveContents(ovalContent),
|
||||
}
|
||||
@@ -76,26 +79,33 @@ func (o SUSE) update(r *models.ScanResult, defPacks defPacks) {
|
||||
cveContents := vinfo.CveContents
|
||||
ctype := models.NewCveContentType(o.family)
|
||||
if _, ok := vinfo.CveContents[ctype]; ok {
|
||||
logging.Log.Debugf("%s OVAL will be overwritten", defPacks.def.Title)
|
||||
logging.Log.Debugf("%s OVAL will be overwritten", defpacks.def.Title)
|
||||
} else {
|
||||
logging.Log.Debugf("%s is also detected by OVAL", defPacks.def.Title)
|
||||
logging.Log.Debugf("%s is also detected by OVAL", defpacks.def.Title)
|
||||
cveContents = models.CveContents{}
|
||||
}
|
||||
vinfo.Confidences.AppendIfMissing(models.OvalMatch)
|
||||
cveContents[ctype] = ovalContent
|
||||
cveContents[ctype] = []models.CveContent{ovalContent}
|
||||
vinfo.CveContents = cveContents
|
||||
}
|
||||
|
||||
// uniq(vinfo.PackNames + defPacks.actuallyAffectedPackNames)
|
||||
// uniq(vinfo.AffectedPackages[].Name + defPacks.binpkgFixstat(map[string(=package name)]fixStat{}))
|
||||
collectBinpkgFixstat := defPacks{
|
||||
binpkgFixstat: map[string]fixStat{},
|
||||
}
|
||||
for packName, fixStatus := range defpacks.binpkgFixstat {
|
||||
collectBinpkgFixstat.binpkgFixstat[packName] = fixStatus
|
||||
}
|
||||
|
||||
for _, pack := range vinfo.AffectedPackages {
|
||||
defPacks.binpkgFixstat[pack.Name] = fixStat{
|
||||
collectBinpkgFixstat.binpkgFixstat[pack.Name] = fixStat{
|
||||
notFixedYet: pack.NotFixedYet,
|
||||
fixedIn: pack.FixedIn,
|
||||
}
|
||||
}
|
||||
vinfo.AffectedPackages = defPacks.toPackStatuses()
|
||||
vinfo.AffectedPackages = collectBinpkgFixstat.toPackStatuses()
|
||||
vinfo.AffectedPackages.Sort()
|
||||
r.ScannedCves[defPacks.def.Title] = vinfo
|
||||
r.ScannedCves[defpacks.def.Title] = vinfo
|
||||
}
|
||||
|
||||
func (o SUSE) convertToModel(def *ovalmodels.Definition) *models.CveContent {
|
||||
|
||||
138  oval/util.go
@@ -1,12 +1,15 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/cenkalti/backoff"
|
||||
@@ -18,9 +21,9 @@ import (
|
||||
apkver "github.com/knqyf263/go-apk-version"
|
||||
debver "github.com/knqyf263/go-deb-version"
|
||||
rpmver "github.com/knqyf263/go-rpm-version"
|
||||
"github.com/kotakanbe/goval-dictionary/db"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
"github.com/parnurzeal/gorequest"
|
||||
"github.com/vulsio/goval-dictionary/db"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
@@ -96,7 +99,6 @@ type response struct {
|
||||
|
||||
// getDefsByPackNameViaHTTP fetches OVAL information via HTTP
|
||||
func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ovalResult, err error) {
|
||||
|
||||
nReq := len(r.Packages) + len(r.SrcPackages)
|
||||
reqChan := make(chan request, nReq)
|
||||
resChan := make(chan response, nReq)
|
||||
@@ -126,6 +128,14 @@ func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ova
|
||||
}
|
||||
}()
|
||||
|
||||
ovalFamily, err := GetFamilyInOval(r.Family)
|
||||
if err != nil {
|
||||
return relatedDefs, xerrors.Errorf("Failed to GetFamilyInOval. err: %w", err)
|
||||
}
|
||||
ovalRelease := r.Release
|
||||
if r.Family == constant.CentOS {
|
||||
ovalRelease = strings.TrimPrefix(r.Release, "stream")
|
||||
}
|
||||
concurrency := 10
|
||||
tasks := util.GenWorkers(concurrency)
|
||||
for i := 0; i < nReq; i++ {
|
||||
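// Illustration only, not part of this diff: a minimal sketch of the release
// normalization added above. It assumes a CentOS Stream scan result reports
// Release as e.g. "stream8"; the prefix is stripped so the goval-dictionary
// lookup sees the plain "8".
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, release := range []string{"stream8", "8", "stream9"} {
		fmt.Println(release, "->", strings.TrimPrefix(release, "stream"))
	}
	// Output:
	// stream8 -> 8
	// 8 -> 8
	// stream9 -> 9
}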
@@ -135,8 +145,8 @@ func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ova
|
||||
url, err := util.URLPathJoin(
|
||||
url,
|
||||
"packs",
|
||||
r.Family,
|
||||
r.Release,
|
||||
ovalFamily,
|
||||
ovalRelease,
|
||||
req.packName,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -155,7 +165,11 @@ func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ova
|
||||
select {
|
||||
case res := <-resChan:
|
||||
for _, def := range res.defs {
|
||||
affected, notFixedYet, fixedIn := isOvalDefAffected(def, res.request, r.Family, r.RunningKernel, r.EnabledDnfModules)
|
||||
affected, notFixedYet, fixedIn, err := isOvalDefAffected(def, res.request, ovalFamily, r.RunningKernel, r.EnabledDnfModules)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
continue
|
||||
}
|
||||
if !affected {
|
||||
continue
|
||||
}
|
||||
@@ -185,7 +199,7 @@ func getDefsByPackNameViaHTTP(r *models.ScanResult, url string) (relatedDefs ova
|
||||
}
|
||||
}
|
||||
if len(errs) != 0 {
|
||||
return relatedDefs, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
|
||||
return relatedDefs, xerrors.Errorf("Failed to detect OVAL. err: %w", errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -253,16 +267,22 @@ func getDefsByPackNameFromOvalDB(driver db.DB, r *models.ScanResult) (relatedDef
|
||||
|
||||
ovalFamily, err := GetFamilyInOval(r.Family)
|
||||
if err != nil {
|
||||
return relatedDefs, err
|
||||
return relatedDefs, xerrors.Errorf("Failed to GetFamilyInOval. err: %w", err)
|
||||
}
|
||||
ovalRelease := r.Release
|
||||
if r.Family == constant.CentOS {
|
||||
ovalRelease = strings.TrimPrefix(r.Release, "stream")
|
||||
}
|
||||
|
||||
for _, req := range requests {
|
||||
definitions, err := driver.GetByPackName(ovalFamily, r.Release, req.packName, req.arch)
|
||||
definitions, err := driver.GetByPackName(ovalFamily, ovalRelease, req.packName, req.arch)
|
||||
if err != nil {
|
||||
return relatedDefs, xerrors.Errorf("Failed to get %s OVAL info by package: %#v, err: %w", r.Family, req, err)
|
||||
}
|
||||
for _, def := range definitions {
|
||||
affected, notFixedYet, fixedIn := isOvalDefAffected(def, req, ovalFamily, r.RunningKernel, r.EnabledDnfModules)
|
||||
affected, notFixedYet, fixedIn, err := isOvalDefAffected(def, req, ovalFamily, r.RunningKernel, r.EnabledDnfModules)
|
||||
if err != nil {
|
||||
return relatedDefs, xerrors.Errorf("Failed to exec isOvalAffected. err: %w", err)
|
||||
}
|
||||
if !affected {
|
||||
continue
|
||||
}
|
||||
@@ -289,16 +309,49 @@ func getDefsByPackNameFromOvalDB(driver db.DB, r *models.ScanResult) (relatedDef
|
||||
return
|
||||
}
|
||||
|
||||
func isOvalDefAffected(def ovalmodels.Definition, req request, family string, running models.Kernel, enabledMods []string) (affected, notFixedYet bool, fixedIn string) {
|
||||
var modularVersionPattern = regexp.MustCompile(`.+\.module(?:\+el|_f)\d{1,2}.*`)
|
||||
|
||||
func isOvalDefAffected(def ovalmodels.Definition, req request, family string, running models.Kernel, enabledMods []string) (affected, notFixedYet bool, fixedIn string, err error) {
|
||||
for _, ovalPack := range def.AffectedPacks {
|
||||
if req.packName != ovalPack.Name {
|
||||
continue
|
||||
}
|
||||
|
||||
switch family {
|
||||
case constant.Oracle, constant.Amazon, constant.Fedora:
|
||||
if ovalPack.Arch == "" {
|
||||
logging.Log.Infof("Arch is needed to detect Vulns for Amazon Linux, Oracle Linux and Fedora, but empty. You need refresh OVAL maybe. oval: %#v, defID: %s", ovalPack, def.DefinitionID)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if ovalPack.Arch != "" && req.arch != ovalPack.Arch {
|
||||
continue
|
||||
}
|
||||
|
||||
// https://github.com/aquasecurity/trivy/pull/745
|
||||
if strings.Contains(req.versionRelease, ".ksplice1.") != strings.Contains(ovalPack.Version, ".ksplice1.") {
|
||||
continue
|
||||
}
|
||||
|
||||
// There is a modular package and a non-modular package with the same name. (e.g. fedora 35 community-mysql)
|
||||
if ovalPack.ModularityLabel == "" && modularVersionPattern.MatchString(req.versionRelease) {
|
||||
continue
|
||||
} else if ovalPack.ModularityLabel != "" && !modularVersionPattern.MatchString(req.versionRelease) {
|
||||
continue
|
||||
}
|
||||
|
||||
isModularityLabelEmptyOrSame := false
|
||||
if ovalPack.ModularityLabel != "" {
|
||||
// expect ovalPack.ModularityLabel e.g. RedHat: nginx:1.16, Fedora: mysql:8.0:3520211031142409:f27b74a8
|
||||
ss := strings.Split(ovalPack.ModularityLabel, ":")
|
||||
if len(ss) < 2 {
|
||||
logging.Log.Warnf("Invalid modularitylabel format in oval package. Maybe it is necessary to fix modularitylabel of goval-dictionary. expected: ${name}:${stream}(:${version}:${context}:${arch}), actual: %s", ovalPack.ModularityLabel)
|
||||
continue
|
||||
}
|
||||
modularityNameStreamLabel := fmt.Sprintf("%s:%s", ss[0], ss[1])
|
||||
for _, mod := range enabledMods {
|
||||
if mod == ovalPack.ModularityLabel {
|
||||
if mod == modularityNameStreamLabel {
|
||||
isModularityLabelEmptyOrSame = true
|
||||
break
|
||||
}
|
||||
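// Illustration only, not part of this diff: how the ModularityLabel handling
// above reduces a full label to its "name:stream" prefix before matching it
// against the host's enabled dnf modules. moduleNameStream is a hypothetical
// helper written for this sketch; the sample label comes from the comment above.
package main

import (
	"fmt"
	"strings"
)

// moduleNameStream keeps only the "name:stream" prefix of a modularity label,
// e.g. "mysql:8.0:3520211031142409:f27b74a8" -> "mysql:8.0", which is the form
// listed among a scan result's enabled modules.
func moduleNameStream(label string) (string, bool) {
	ss := strings.Split(label, ":")
	if len(ss) < 2 {
		return "", false // malformed label: name or stream part is missing
	}
	return ss[0] + ":" + ss[1], true
}

func main() {
	enabled := []string{"mysql:8.0", "nginx:1.16"}
	label := "mysql:8.0:3520211031142409:f27b74a8"

	nameStream, ok := moduleNameStream(label)
	fmt.Println(nameStream, ok) // mysql:8.0 true

	for _, mod := range enabled {
		if mod == nameStream {
			fmt.Println("module enabled on the host:", mod)
		}
	}
}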
@@ -312,7 +365,7 @@ func isOvalDefAffected(def ovalmodels.Definition, req request, family string, ru
|
||||
|
||||
if running.Release != "" {
|
||||
switch family {
|
||||
case constant.RedHat, constant.CentOS:
|
||||
case constant.RedHat, constant.CentOS, constant.Alma, constant.Rocky, constant.Oracle, constant.Fedora:
|
||||
// For kernel related packages, ignore OVAL information with different major versions
|
||||
if _, ok := kernelRelatedPackNames[ovalPack.Name]; ok {
|
||||
if util.Major(ovalPack.Version) != util.Major(running.Release) {
|
||||
@@ -323,7 +376,7 @@ func isOvalDefAffected(def ovalmodels.Definition, req request, family string, ru
|
||||
}
|
||||
|
||||
if ovalPack.NotFixedYet {
|
||||
return true, true, ovalPack.Version
|
||||
return true, true, ovalPack.Version, nil
|
||||
}
|
||||
|
||||
// Compare between the installed version vs the version in OVAL
|
||||
@@ -331,27 +384,29 @@ func isOvalDefAffected(def ovalmodels.Definition, req request, family string, ru
|
||||
if err != nil {
|
||||
logging.Log.Debugf("Failed to parse versions: %s, Ver: %#v, OVAL: %#v, DefID: %s",
|
||||
err, req.versionRelease, ovalPack, def.DefinitionID)
|
||||
return false, false, ovalPack.Version
|
||||
return false, false, ovalPack.Version, nil
|
||||
}
|
||||
if less {
|
||||
if req.isSrcPack {
|
||||
// Unable to judge whether fixed or not-fixed of src package(Ubuntu, Debian)
|
||||
return true, false, ovalPack.Version
|
||||
return true, false, ovalPack.Version, nil
|
||||
}
|
||||
|
||||
// If the version of installed is less than in OVAL
|
||||
switch family {
|
||||
case constant.RedHat,
|
||||
constant.Fedora,
|
||||
constant.Amazon,
|
||||
constant.Oracle,
|
||||
constant.SUSEEnterpriseServer,
|
||||
constant.Debian,
|
||||
constant.Ubuntu,
|
||||
constant.Raspbian:
|
||||
constant.Raspbian,
|
||||
constant.Ubuntu:
|
||||
// Use fixed state in OVAL for these distros.
|
||||
return true, false, ovalPack.Version
|
||||
return true, false, ovalPack.Version, nil
|
||||
}
|
||||
|
||||
// But CentOS can't judge whether fixed or unfixed.
|
||||
// But CentOS/Alma/Rocky can't judge whether fixed or unfixed.
|
||||
// Because fixed state in RHEL OVAL is different.
|
||||
// So, it have to be judged version comparison.
|
||||
|
||||
@@ -359,7 +414,7 @@ func isOvalDefAffected(def ovalmodels.Definition, req request, family string, ru
|
||||
// In these mode, the blow field was set empty.
|
||||
// Vuls can not judge fixed or unfixed.
|
||||
if req.newVersionRelease == "" {
|
||||
return true, false, ovalPack.Version
|
||||
return true, false, ovalPack.Version, nil
|
||||
}
|
||||
|
||||
// compare version: newVer vs oval
|
||||
@@ -367,12 +422,12 @@ func isOvalDefAffected(def ovalmodels.Definition, req request, family string, ru
|
||||
if err != nil {
|
||||
logging.Log.Debugf("Failed to parse versions: %s, NewVer: %#v, OVAL: %#v, DefID: %s",
|
||||
err, req.newVersionRelease, ovalPack, def.DefinitionID)
|
||||
return false, false, ovalPack.Version
|
||||
return false, false, ovalPack.Version, nil
|
||||
}
|
||||
return true, less, ovalPack.Version
|
||||
return true, less, ovalPack.Version, nil
|
||||
}
|
||||
}
|
||||
return false, false, ""
|
||||
return false, false, "", nil
|
||||
}
|
||||
|
||||
func lessThan(family, newVer string, packInOVAL ovalmodels.Package) (bool, error) {
|
||||
@@ -403,15 +458,18 @@ func lessThan(family, newVer string, packInOVAL ovalmodels.Package) (bool, error
|
||||
|
||||
case constant.Oracle,
|
||||
constant.SUSEEnterpriseServer,
|
||||
constant.Amazon:
|
||||
constant.Amazon,
|
||||
constant.Fedora:
|
||||
vera := rpmver.NewVersion(newVer)
|
||||
verb := rpmver.NewVersion(packInOVAL.Version)
|
||||
return vera.LessThan(verb), nil
|
||||
|
||||
case constant.RedHat,
|
||||
constant.CentOS:
|
||||
vera := rpmver.NewVersion(centOSVersionToRHEL(newVer))
|
||||
verb := rpmver.NewVersion(centOSVersionToRHEL(packInOVAL.Version))
|
||||
constant.CentOS,
|
||||
constant.Alma,
|
||||
constant.Rocky:
|
||||
vera := rpmver.NewVersion(rhelRebuildOSVersionToRHEL(newVer))
|
||||
verb := rpmver.NewVersion(rhelRebuildOSVersionToRHEL(packInOVAL.Version))
|
||||
return vera.LessThan(verb), nil
|
||||
|
||||
default:
|
||||
@@ -419,10 +477,10 @@ func lessThan(family, newVer string, packInOVAL ovalmodels.Package) (bool, error
|
||||
}
|
||||
}
|
||||
|
||||
var centosVerPattern = regexp.MustCompile(`\.[es]l(\d+)(?:_\d+)?(?:\.centos)?`)
|
||||
var rhelRebuildOSVerPattern = regexp.MustCompile(`\.[es]l(\d+)(?:_\d+)?(?:\.(centos|rocky|alma))?`)
|
||||
|
||||
func centOSVersionToRHEL(ver string) string {
|
||||
return centosVerPattern.ReplaceAllString(ver, ".el$1")
|
||||
func rhelRebuildOSVersionToRHEL(ver string) string {
|
||||
return rhelRebuildOSVerPattern.ReplaceAllString(ver, ".el$1")
|
||||
}
|
||||
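// Illustration only, not part of this diff: the renamed helper above in
// isolation. The first input is taken from the new test case further down;
// the other two assume typical CentOS/RHEL-style release strings.
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as above: drop the ".centos"/".rocky"/".alma" rebuild suffix
// (and any "_N" minor part) so versions compare cleanly against RHEL OVAL data.
var rhelRebuildOSVerPattern = regexp.MustCompile(`\.[es]l(\d+)(?:_\d+)?(?:\.(centos|rocky|alma))?`)

func rhelRebuildOSVersionToRHEL(ver string) string {
	return rhelRebuildOSVerPattern.ReplaceAllString(ver, ".el$1")
}

func main() {
	fmt.Println(rhelRebuildOSVersionToRHEL("platform-python-3.6.8-37.el8.rocky.x86_64")) // platform-python-3.6.8-37.el8.x86_64
	fmt.Println(rhelRebuildOSVersionToRHEL("grub2-tools-2.02-0.80.el7.centos.x86_64"))   // grub2-tools-2.02-0.80.el7.x86_64
	fmt.Println(rhelRebuildOSVersionToRHEL("0:1.2.3-45.el6_7.8"))                        // 0:1.2.3-45.el6.8
}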
|
||||
// NewOVALClient returns a client for OVAL database
|
||||
@@ -435,8 +493,11 @@ func NewOVALClient(family string, cnf config.GovalDictConf) (Client, error) {
|
||||
case constant.RedHat:
|
||||
return NewRedhat(&cnf), nil
|
||||
case constant.CentOS:
|
||||
//use RedHat's OVAL
|
||||
return NewCentOS(&cnf), nil
|
||||
case constant.Alma:
|
||||
return NewAlma(&cnf), nil
|
||||
case constant.Rocky:
|
||||
return NewRocky(&cnf), nil
|
||||
case constant.Oracle:
|
||||
return NewOracle(&cnf), nil
|
||||
case constant.SUSEEnterpriseServer:
|
||||
@@ -446,6 +507,8 @@ func NewOVALClient(family string, cnf config.GovalDictConf) (Client, error) {
|
||||
return NewAlpine(&cnf), nil
|
||||
case constant.Amazon:
|
||||
return NewAmazon(&cnf), nil
|
||||
case constant.Fedora:
|
||||
return NewFedora(&cnf), nil
|
||||
case constant.FreeBSD, constant.Windows:
|
||||
return nil, nil
|
||||
case constant.ServerTypePseudo:
|
||||
@@ -459,18 +522,17 @@ func NewOVALClient(family string, cnf config.GovalDictConf) (Client, error) {
|
||||
}
|
||||
|
||||
// GetFamilyInOval returns the OS family name in OVAL
|
||||
// For example, CentOS uses Red Hat's OVAL, so return 'redhat'
|
||||
// For example, CentOS/Alma/Rocky uses Red Hat's OVAL, so return 'redhat'
|
||||
func GetFamilyInOval(familyInScanResult string) (string, error) {
|
||||
switch familyInScanResult {
|
||||
case constant.Debian, constant.Raspbian:
|
||||
return constant.Debian, nil
|
||||
case constant.Ubuntu:
|
||||
return constant.Ubuntu, nil
|
||||
case constant.RedHat:
|
||||
return constant.RedHat, nil
|
||||
case constant.CentOS:
|
||||
//use RedHat's OVAL
|
||||
case constant.RedHat, constant.CentOS, constant.Alma, constant.Rocky:
|
||||
return constant.RedHat, nil
|
||||
case constant.Fedora:
|
||||
return constant.Fedora, nil
|
||||
case constant.Oracle:
|
||||
return constant.Oracle, nil
|
||||
case constant.SUSEEnterpriseServer:
|
||||
|
||||
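// Hedged usage sketch, not part of this diff: with the new switch arms,
// Alma/Rocky scan results resolve to Red Hat's OVAL family and get their own
// OVAL client types. Package paths follow the imports shown above; the
// zero-value GovalDictConf is only a placeholder.
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
	"github.com/future-architect/vuls/oval"
)

func main() {
	// Rocky (like CentOS and Alma) reuses Red Hat's OVAL definitions.
	family, err := oval.GetFamilyInOval(constant.Rocky)
	fmt.Println(family == constant.RedHat, err) // true <nil>

	// ...but gets its own client via the new switch arm in NewOVALClient.
	// A real run would point the config at a goval-dictionary DB or server.
	client, err := oval.NewOVALClient(constant.Rocky, config.GovalDictConf{})
	fmt.Printf("%T %v\n", client, err) // oval.Rocky <nil>
}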
@@ -1,3 +1,4 @@
|
||||
//go:build !scanner
|
||||
// +build !scanner
|
||||
|
||||
package oval
|
||||
@@ -9,7 +10,7 @@ import (
|
||||
|
||||
"github.com/future-architect/vuls/constant"
|
||||
"github.com/future-architect/vuls/models"
|
||||
ovalmodels "github.com/kotakanbe/goval-dictionary/models"
|
||||
ovalmodels "github.com/vulsio/goval-dictionary/models"
|
||||
)
|
||||
|
||||
func TestUpsert(t *testing.T) {
|
||||
@@ -209,6 +210,7 @@ func TestIsOvalDefAffected(t *testing.T) {
|
||||
affected bool
|
||||
notFixedYet bool
|
||||
fixedIn string
|
||||
wantErr bool
|
||||
}{
|
||||
// 0. Ubuntu ovalpack.NotFixedYet == true
|
||||
{
|
||||
@@ -1077,6 +1079,472 @@ func TestIsOvalDefAffected(t *testing.T) {
|
||||
notFixedYet: false,
|
||||
fixedIn: "3.1.0",
|
||||
},
|
||||
// Rocky Linux
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.7",
|
||||
newVersionRelease: "",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.9",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.6",
|
||||
newVersionRelease: "0:1.2.3-45.el6.rocky.7",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: true,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.6",
|
||||
newVersionRelease: "0:1.2.3-45.el6.rocky.8",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.rocky.6",
|
||||
newVersionRelease: "0:1.2.3-45.el6.rocky.9",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.7",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.9",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.6",
|
||||
newVersionRelease: "0:1.2.3-45.sl6.7",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: true,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.6",
|
||||
newVersionRelease: "0:1.2.3-45.sl6.8",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.sl6.6",
|
||||
newVersionRelease: "0:1.2.3-45.sl6.9",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: "rocky",
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "a",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
{
|
||||
Name: "b",
|
||||
NotFixedYet: false,
|
||||
Version: "0:1.2.3-45.el6.8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "b",
|
||||
isSrcPack: false,
|
||||
versionRelease: "0:1.2.3-45.el6_7.8",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
// For kernel related packages, ignore OVAL with different major versions
|
||||
{
|
||||
in: in{
|
||||
family: constant.Rocky,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "kernel",
|
||||
Version: "4.1.0",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "kernel",
|
||||
versionRelease: "3.0.0",
|
||||
newVersionRelease: "3.2.0",
|
||||
},
|
||||
kernel: models.Kernel{
|
||||
Release: "3.0.0",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
{
|
||||
in: in{
|
||||
family: constant.Rocky,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "kernel",
|
||||
Version: "3.1.0",
|
||||
NotFixedYet: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "kernel",
|
||||
versionRelease: "3.0.0",
|
||||
newVersionRelease: "3.2.0",
|
||||
},
|
||||
kernel: models.Kernel{
|
||||
Release: "3.0.0",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "3.1.0",
|
||||
},
|
||||
// dnf module
|
||||
{
|
||||
in: in{
|
||||
@@ -1153,9 +1621,248 @@ func TestIsOvalDefAffected(t *testing.T) {
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
// dnf module 4 (long modularitylabel)
|
||||
{
|
||||
in: in{
|
||||
family: constant.Fedora,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "community-mysql",
|
||||
Version: "0:8.0.27-1.module_f35+13269+c9322734",
|
||||
Arch: "x86_64",
|
||||
NotFixedYet: false,
|
||||
ModularityLabel: "mysql:8.0:3520211031142409:f27b74a8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "community-mysql",
|
||||
arch: "x86_64",
|
||||
versionRelease: "8.0.26-1.module_f35+12627+b26747dd",
|
||||
},
|
||||
mods: []string{
|
||||
"mysql:8.0",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
notFixedYet: false,
|
||||
fixedIn: "0:8.0.27-1.module_f35+13269+c9322734",
|
||||
},
|
||||
// dnf module 5 (req is non-modular package, oval is modular package)
|
||||
{
|
||||
in: in{
|
||||
family: constant.Fedora,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "community-mysql",
|
||||
Version: "0:8.0.27-1.module_f35+13269+c9322734",
|
||||
Arch: "x86_64",
|
||||
NotFixedYet: false,
|
||||
ModularityLabel: "mysql:8.0:3520211031142409:f27b74a8",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "community-mysql",
|
||||
arch: "x86_64",
|
||||
versionRelease: "8.0.26-1.fc35",
|
||||
},
|
||||
mods: []string{
|
||||
"mysql:8.0",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
// dnf module 6 (req is modular package, oval is non-modular package)
|
||||
{
|
||||
in: in{
|
||||
family: constant.Fedora,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "community-mysql",
|
||||
Version: "0:8.0.27-1.fc35",
|
||||
Arch: "x86_64",
|
||||
NotFixedYet: false,
|
||||
ModularityLabel: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "community-mysql",
|
||||
arch: "x86_64",
|
||||
versionRelease: "8.0.26-1.module_f35+12627+b26747dd",
|
||||
},
|
||||
mods: []string{
|
||||
"mysql:8.0",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
notFixedYet: false,
|
||||
},
|
||||
// .ksplice1.
|
||||
{
|
||||
in: in{
|
||||
family: constant.Oracle,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2:2.17-106.0.1.ksplice1.el7_2.4",
|
||||
Arch: "x86_64",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2:2.17-107",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
},
|
||||
// .ksplice1.
|
||||
{
|
||||
in: in{
|
||||
family: constant.Oracle,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2:2.17-106.0.1.ksplice1.el7_2.4",
|
||||
Arch: "x86_64",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2:2.17-105.0.1.ksplice1.el7_2.4",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
fixedIn: "2:2.17-106.0.1.ksplice1.el7_2.4",
|
||||
},
|
||||
// same arch
|
||||
{
|
||||
in: in{
|
||||
family: constant.Oracle,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2.17-106.0.1",
|
||||
Arch: "x86_64",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2.17-105.0.1",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
fixedIn: "2.17-106.0.1",
|
||||
},
|
||||
// different arch
|
||||
{
|
||||
in: in{
|
||||
family: constant.Oracle,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2.17-106.0.1",
|
||||
Arch: "aarch64",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2.17-105.0.1",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
affected: false,
|
||||
fixedIn: "",
|
||||
},
|
||||
// Arch for RHEL, CentOS is ""
|
||||
{
|
||||
in: in{
|
||||
family: constant.RedHat,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2.17-106.0.1",
|
||||
Arch: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2.17-105.0.1",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
affected: true,
|
||||
fixedIn: "2.17-106.0.1",
|
||||
},
|
||||
// arch is empty for Oracle, Amazon linux
|
||||
{
|
||||
in: in{
|
||||
family: constant.Oracle,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2.17-106.0.1",
|
||||
Arch: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2.17-105.0.1",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
fixedIn: "",
|
||||
},
|
||||
// arch is empty for Oracle, Amazon linux
|
||||
{
|
||||
in: in{
|
||||
family: constant.Amazon,
|
||||
def: ovalmodels.Definition{
|
||||
AffectedPacks: []ovalmodels.Package{
|
||||
{
|
||||
Name: "nginx",
|
||||
Version: "2.17-106.0.1",
|
||||
Arch: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
req: request{
|
||||
packName: "nginx",
|
||||
versionRelease: "2.17-105.0.1",
|
||||
arch: "x86_64",
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
fixedIn: "",
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
affected, notFixedYet, fixedIn := isOvalDefAffected(tt.in.def, tt.in.req, tt.in.family, tt.in.kernel, tt.in.mods)
|
||||
affected, notFixedYet, fixedIn, err := isOvalDefAffected(tt.in.def, tt.in.req, tt.in.family, tt.in.kernel, tt.in.mods)
|
||||
if tt.wantErr != (err != nil) {
|
||||
t.Errorf("[%d] err\nexpected: %t\n actual: %s\n", i, tt.wantErr, err)
|
||||
}
|
||||
if tt.affected != affected {
|
||||
t.Errorf("[%d] affected\nexpected: %v\n actual: %v\n", i, tt.affected, affected)
|
||||
}
|
||||
@@ -1168,7 +1875,7 @@ func TestIsOvalDefAffected(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_centOSVersionToRHEL(t *testing.T) {
|
||||
func Test_rhelDownStreamOSVersionToRHEL(t *testing.T) {
|
||||
type args struct {
|
||||
ver string
|
||||
}
|
||||
@@ -1184,6 +1891,13 @@ func Test_centOSVersionToRHEL(t *testing.T) {
|
||||
},
|
||||
want: "grub2-tools-2.02-0.80.el7.x86_64",
|
||||
},
|
||||
{
|
||||
name: "remove rocky.",
|
||||
args: args{
|
||||
ver: "platform-python-3.6.8-37.el8.rocky.x86_64",
|
||||
},
|
||||
want: "platform-python-3.6.8-37.el8.x86_64",
|
||||
},
|
||||
{
|
||||
name: "noop",
|
||||
args: args{
|
||||
@@ -1201,8 +1915,8 @@ func Test_centOSVersionToRHEL(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := centOSVersionToRHEL(tt.args.ver); got != tt.want {
|
||||
t.Errorf("centOSVersionToRHEL() = %v, want %v", got, tt.want)
|
||||
if got := rhelRebuildOSVersionToRHEL(tt.args.ver); got != tt.want {
|
||||
t.Errorf("rhelRebuildOSVersionToRHEL() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
102  reporter/googlechat.go  Normal file
@@ -0,0 +1,102 @@
|
||||
package reporter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/models"
|
||||
"github.com/future-architect/vuls/util"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// GoogleChatWriter send report to GoogleChat
|
||||
type GoogleChatWriter struct {
|
||||
Cnf config.GoogleChatConf
|
||||
Proxy string
|
||||
}
|
||||
|
||||
func (w GoogleChatWriter) Write(rs ...models.ScanResult) (err error) {
|
||||
re := regexp.MustCompile(w.Cnf.ServerNameRegexp)
|
||||
|
||||
for _, r := range rs {
|
||||
if re.Match([]byte(r.FormatServerName())) {
|
||||
continue
|
||||
}
|
||||
msgs := []string{fmt.Sprintf("*%s*\n%s\t%s\t%s",
|
||||
r.ServerInfo(),
|
||||
r.ScannedCves.FormatCveSummary(),
|
||||
r.ScannedCves.FormatFixedStatus(r.Packages),
|
||||
r.FormatUpdatablePkgsSummary())}
|
||||
for _, vinfo := range r.ScannedCves.ToSortedSlice() {
|
||||
max := vinfo.MaxCvssScore().Value.Score
|
||||
|
||||
exploits := ""
|
||||
if 0 < len(vinfo.Exploits) || 0 < len(vinfo.Metasploits) {
|
||||
exploits = "*PoC*"
|
||||
}
|
||||
|
||||
link := ""
|
||||
if strings.HasPrefix(vinfo.CveID, "CVE-") {
|
||||
link = fmt.Sprintf("https://nvd.nist.gov/vuln/detail/%s", vinfo.CveID)
|
||||
} else if strings.HasPrefix(vinfo.CveID, "WPVDBID-") {
|
||||
link = fmt.Sprintf("https://wpscan.com/vulnerabilities/%s", strings.TrimPrefix(vinfo.CveID, "WPVDBID-"))
|
||||
}
|
||||
|
||||
msgs = append(msgs, fmt.Sprintf(`%s %s %4.1f %5s %s`,
|
||||
vinfo.CveIDDiffFormat(),
|
||||
link,
|
||||
max,
|
||||
vinfo.AttackVector(),
|
||||
exploits))
|
||||
if len(msgs) == 50 {
|
||||
msgs = append(msgs, "(The rest is omitted.)")
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(msgs) == 1 && w.Cnf.SkipIfNoCve {
|
||||
msgs = []string{}
|
||||
}
|
||||
if len(msgs) != 0 {
|
||||
if err = w.postMessage(strings.Join(msgs, "\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w GoogleChatWriter) postMessage(message string) error {
|
||||
uri := fmt.Sprintf("%s", w.Cnf.WebHookURL)
|
||||
payload := `{"text": "` + message + `" }`
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, uri, bytes.NewBuffer([]byte(payload)))
|
||||
defer cancel()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Content-Type", "application/json; charset=utf-8")
|
||||
client, err := util.GetHTTPClient(w.Proxy)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if checkResponse(resp) != nil && err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w GoogleChatWriter) checkResponse(r *http.Response) error {
|
||||
if c := r.StatusCode; 200 <= c && c <= 299 {
|
||||
return nil
|
||||
}
|
||||
return xerrors.Errorf("API call to %s failed: %s", r.Request.URL.String(), r.Status)
|
||||
}
|
||||
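// Hypothetical wiring for the new writer above, not part of this diff. The
// writer and config field names are taken from the code in this file; the
// webhook URL and regexp values are placeholders. Note that, per Write above,
// servers whose names match ServerNameRegexp are skipped.
package main

import (
	"log"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/models"
	"github.com/future-architect/vuls/reporter"
)

func main() {
	w := reporter.GoogleChatWriter{
		Cnf: config.GoogleChatConf{
			WebHookURL:       "https://chat.googleapis.com/v1/spaces/XXXX/messages?key=YYYY&token=ZZZZ", // placeholder
			ServerNameRegexp: `^staging-.*`, // servers matching this pattern are skipped
			SkipIfNoCve:      true,
		},
	}

	var results []models.ScanResult // normally produced by a scan / report run
	if err := w.Write(results...); err != nil {
		log.Fatal(err)
	}
}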
@@ -11,7 +11,7 @@ import (
|
||||
|
||||
// HTTPRequestWriter writes results to HTTP request
|
||||
type HTTPRequestWriter struct {
|
||||
Proxy string
|
||||
URL string
|
||||
}
|
||||
|
||||
// Write sends results as HTTP response
|
||||
@@ -21,7 +21,7 @@ func (w HTTPRequestWriter) Write(rs ...models.ScanResult) (err error) {
|
||||
if err := json.NewEncoder(b).Encode(r); err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = http.Post(w.Proxy, "application/json; charset=utf-8", b)
|
||||
_, err = http.Post(w.URL, "application/json; charset=utf-8", b)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -269,19 +269,20 @@ func (w SlackWriter) attachmentText(vinfo models.VulnInfo, cweDict map[string]mo
|
||||
vinfo.CveID)
|
||||
}
|
||||
|
||||
if cont, ok := vinfo.CveContents[cvss.Type]; ok {
|
||||
v := fmt.Sprintf("<%s|%s> %s (<%s|%s>)",
|
||||
calcURL,
|
||||
fmt.Sprintf("%3.1f/%s", cvss.Value.Score, cvss.Value.Vector),
|
||||
cvss.Value.Severity,
|
||||
cont.SourceLink,
|
||||
cvss.Type)
|
||||
vectors = append(vectors, v)
|
||||
|
||||
if conts, ok := vinfo.CveContents[cvss.Type]; ok {
|
||||
for _, cont := range conts {
|
||||
v := fmt.Sprintf("<%s|%s> %s (<%s|%s>)",
|
||||
calcURL,
|
||||
fmt.Sprintf("%3.1f/%s", cvss.Value.Score, cvss.Value.Vector),
|
||||
cvss.Value.Severity,
|
||||
cont.SourceLink,
|
||||
cvss.Type)
|
||||
vectors = append(vectors, v)
|
||||
}
|
||||
} else {
|
||||
if 0 < len(vinfo.DistroAdvisories) {
|
||||
links := []string{}
|
||||
for _, v := range vinfo.CveContents.PrimarySrcURLs(w.lang, w.osFamily, vinfo.CveID) {
|
||||
for _, v := range vinfo.CveContents.PrimarySrcURLs(w.lang, w.osFamily, vinfo.CveID, vinfo.Confidences) {
|
||||
links = append(links, fmt.Sprintf("<%s|%s>", v.Value, v.Type))
|
||||
}
|
||||
|
||||
|
||||
@@ -70,16 +70,20 @@ func (w SyslogWriter) encodeSyslog(result models.ScanResult) (messages []string)
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`cvss_vector_%s_v3="%s"`, cvss.Type, cvss.Value.Vector))
|
||||
}
|
||||
|
||||
if content, ok := vinfo.CveContents[models.Nvd]; ok {
|
||||
cwes := strings.Join(content.CweIDs, ",")
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`cwe_ids="%s"`, cwes))
|
||||
if w.Cnf.Verbose {
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`source_link="%s"`, content.SourceLink))
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`summary="%s"`, content.Summary))
|
||||
if conts, ok := vinfo.CveContents[models.Nvd]; ok {
|
||||
for _, cont := range conts {
|
||||
cwes := strings.Join(cont.CweIDs, ",")
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`cwe_ids="%s"`, cwes))
|
||||
if w.Cnf.Verbose {
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`source_link="%s"`, cont.SourceLink))
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`summary="%s"`, cont.Summary))
|
||||
}
|
||||
}
|
||||
}
|
||||
if content, ok := vinfo.CveContents[models.RedHat]; ok {
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`title="%s"`, content.Title))
|
||||
if conts, ok := vinfo.CveContents[models.RedHat]; ok {
|
||||
for _, cont := range conts {
|
||||
kvPairs = append(kvPairs, fmt.Sprintf(`title="%s"`, cont.Title))
|
||||
}
|
||||
}
|
||||
|
||||
// message: key1="value1" key2="value2"...
|
||||
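// Illustration only, not part of this diff: the key="value" message shape the
// comment above describes, using the CWE ID and RHSA title that appear in the
// test data below.
package main

import (
	"fmt"
	"strings"
)

func main() {
	// The syslog writer joins pairs like these into a single message line.
	kvPairs := []string{
		fmt.Sprintf(`cwe_ids="%s"`, "CWE-284"),
		fmt.Sprintf(`title="%s"`, "RHSA-2017:0001: pkg5 security update (Important)"),
	}
	fmt.Println(strings.Join(kvPairs, " "))
	// cwe_ids="CWE-284" title="RHSA-2017:0001: pkg5 security update (Important)"
}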
|
||||
@@ -33,7 +33,7 @@ func TestSyslogWriterEncodeSyslog(t *testing.T) {
|
||||
models.PackageFixStatus{Name: "pkg4"},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
models.Nvd: models.CveContent{
|
||||
models.Nvd: []models.CveContent{{
|
||||
Cvss2Score: 5.0,
|
||||
Cvss2Vector: "AV:L/AC:L/Au:N/C:N/I:N/A:C",
|
||||
Cvss2Severity: "MEDIUM",
|
||||
@@ -41,7 +41,7 @@ func TestSyslogWriterEncodeSyslog(t *testing.T) {
|
||||
Cvss3Score: 9.8,
|
||||
Cvss3Vector: "AV:L/AC:L/Au:N/C:N/I:N/A:C",
|
||||
Cvss3Severity: "HIGH",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -65,13 +65,13 @@ func TestSyslogWriterEncodeSyslog(t *testing.T) {
|
||||
models.PackageFixStatus{Name: "pkg5"},
|
||||
},
|
||||
CveContents: models.CveContents{
|
||||
models.RedHat: models.CveContent{
|
||||
models.RedHat: []models.CveContent{{
|
||||
Cvss3Score: 5.0,
|
||||
Cvss3Severity: "Medium",
|
||||
Cvss3Vector: "AV:L/AC:L/Au:N/C:N/I:N/A:C",
|
||||
CweIDs: []string{"CWE-284"},
|
||||
Title: "RHSA-2017:0001: pkg5 security update (Important)",
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
@@ -276,7 +277,7 @@ No CVE-IDs are found in updatable packages.
|
||||
// fmt.Sprintf("%4.1f", v2max),
|
||||
// fmt.Sprintf("%4.1f", v3max),
|
||||
exploits,
|
||||
vinfo.AlertDict.FormatSource(),
|
||||
fmt.Sprintf("%9s", vinfo.AlertDict.FormatSource()),
|
||||
fmt.Sprintf("%7s", vinfo.PatchStatus(r.Packages)),
|
||||
link,
|
||||
})
|
||||
@@ -291,7 +292,7 @@ No CVE-IDs are found in updatable packages.
|
||||
// "v3",
|
||||
// "v2",
|
||||
"PoC",
|
||||
"CERT",
|
||||
"Alert",
|
||||
"Fixed",
|
||||
"NVD",
|
||||
})
|
||||
@@ -347,7 +348,7 @@ No CVE-IDs are found in updatable packages.
|
||||
data = append(data, []string{"Mitigation", m.URL})
|
||||
}
|
||||
|
||||
links := vuln.CveContents.PrimarySrcURLs(r.Lang, r.Family, vuln.CveID)
|
||||
links := vuln.CveContents.PrimarySrcURLs(r.Lang, r.Family, vuln.CveID, vuln.Confidences)
|
||||
for _, link := range links {
|
||||
data = append(data, []string{"Primary Src", link.Value})
|
||||
}
|
||||
@@ -474,11 +475,15 @@ No CVE-IDs are found in updatable packages.
|
||||
data = append(data, []string{"SANS/CWE Top25", sansTop25URLs[0]})
|
||||
}
|
||||
|
||||
for _, alert := range vuln.AlertDict.Ja {
|
||||
for _, alert := range vuln.AlertDict.CISA {
|
||||
data = append(data, []string{"CISA Alert", alert.URL})
|
||||
}
|
||||
|
||||
for _, alert := range vuln.AlertDict.JPCERT {
|
||||
data = append(data, []string{"JPCERT Alert", alert.URL})
|
||||
}
|
||||
|
||||
for _, alert := range vuln.AlertDict.En {
|
||||
for _, alert := range vuln.AlertDict.USCERT {
|
||||
data = append(data, []string{"US-CERT Alert", alert.URL})
|
||||
}
|
||||
|
||||
@@ -620,7 +625,7 @@ func getPlusDiffCves(previous, current models.ScanResult) models.VulnInfos {
|
||||
|
||||
// TODO commented out because a bug of diff logic when multiple oval defs found for a certain CVE-ID and same updated_at
|
||||
// if these OVAL defs have different affected packages, this logic detects as updated.
|
||||
// This logic will be uncomented after integration with gost https://github.com/knqyf263/gost
|
||||
// This logic will be uncomented after integration with gost https://github.com/vulsio/gost
|
||||
// } else if isCveFixed(v, previous) {
|
||||
// updated[v.CveID] = v
|
||||
// logging.Log.Debugf("fixed: %s", v.CveID)
|
||||
@@ -673,32 +678,36 @@ func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
|
||||
models.NewCveContentType(current.Family),
|
||||
}
|
||||
|
||||
prevLastModified := map[models.CveContentType]time.Time{}
|
||||
prevLastModifieds := map[models.CveContentType][]time.Time{}
|
||||
preVinfo, ok := previous.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if content, ok := preVinfo.CveContents[cType]; ok {
|
||||
prevLastModified[cType] = content.LastModified
|
||||
if conts, ok := preVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
prevLastModifieds[cType] = append(prevLastModifieds[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
curLastModified := map[models.CveContentType]time.Time{}
|
||||
curLastModifieds := map[models.CveContentType][]time.Time{}
|
||||
curVinfo, ok := current.ScannedCves[cveID]
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
for _, cType := range cTypes {
|
||||
if content, ok := curVinfo.CveContents[cType]; ok {
|
||||
curLastModified[cType] = content.LastModified
|
||||
if conts, ok := curVinfo.CveContents[cType]; ok {
|
||||
for _, cont := range conts {
|
||||
curLastModifieds[cType] = append(curLastModifieds[cType], cont.LastModified)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, t := range cTypes {
|
||||
if !curLastModified[t].Equal(prevLastModified[t]) {
|
||||
if !reflect.DeepEqual(curLastModifieds[t], prevLastModifieds[t]) {
|
||||
logging.Log.Debugf("%s LastModified not equal: \n%s\n%s",
|
||||
cveID, curLastModified[t], prevLastModified[t])
|
||||
cveID, curLastModifieds[t], prevLastModifieds[t])
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ import (
|
||||
// EnsureUUIDs generate a new UUID of the scan target server if UUID is not assigned yet.
|
||||
// And then set the generated UUID to config.toml and scan results.
|
||||
func EnsureUUIDs(servers map[string]config.ServerInfo, path string, scanResults models.ScanResults) (err error) {
|
||||
needsOverwrite, err := ensure(servers, path, scanResults, uuid.GenerateUUID)
|
||||
needsOverwrite, err := ensure(servers, scanResults, uuid.GenerateUUID)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("Failed to ensure UUIDs. err: %w", err)
|
||||
}
|
||||
@@ -30,7 +30,7 @@ func EnsureUUIDs(servers map[string]config.ServerInfo, path string, scanResults
|
||||
return writeToFile(config.Conf, path)
|
||||
}
|
||||
|
||||
func ensure(servers map[string]config.ServerInfo, path string, scanResults models.ScanResults, generateFunc func() (string, error)) (needsOverwrite bool, err error) {
|
||||
func ensure(servers map[string]config.ServerInfo, scanResults models.ScanResults, generateFunc func() (string, error)) (needsOverwrite bool, err error) {
|
||||
for i, r := range scanResults {
|
||||
serverInfo := servers[r.ServerName]
|
||||
if serverInfo.UUIDs == nil {
|
||||
|
||||
@@ -377,7 +377,7 @@ func Test_ensure(t *testing.T) {
|
||||
}
|
||||
for i, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
gotNeedsOverwrite, err := ensure(tt.args.servers, tt.args.path, tt.args.scanResults, tt.args.generateFunc)
|
||||
gotNeedsOverwrite, err := ensure(tt.args.servers, tt.args.scanResults, tt.args.generateFunc)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("ensure() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
|
||||
118  scanner/alma.go  Normal file
@@ -0,0 +1,118 @@
|
||||
package scanner
|
||||
|
||||
import (
|
||||
"github.com/future-architect/vuls/config"
|
||||
"github.com/future-architect/vuls/logging"
|
||||
"github.com/future-architect/vuls/models"
|
||||
)
|
||||
|
||||
// inherit OsTypeInterface
|
||||
type alma struct {
|
||||
redhatBase
|
||||
}
|
||||
|
||||
// NewAlma is constructor
|
||||
func newAlma(c config.ServerInfo) *alma {
|
||||
r := &alma{
|
||||
redhatBase{
|
||||
base: base{
|
||||
osPackages: osPackages{
|
||||
Packages: models.Packages{},
|
||||
VulnInfos: models.VulnInfos{},
|
||||
},
|
||||
},
|
||||
sudo: rootPrivAlma{},
|
||||
},
|
||||
}
|
||||
r.log = logging.NewNormalLogger()
|
||||
r.setServerInfo(c)
|
||||
return r
|
||||
}
|
||||
|
||||
func (o *alma) checkScanMode() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *alma) checkDeps() error {
|
||||
if o.getServerInfo().Mode.IsFast() {
|
||||
return o.execCheckDeps(o.depsFast())
|
||||
} else if o.getServerInfo().Mode.IsFastRoot() {
|
||||
return o.execCheckDeps(o.depsFastRoot())
|
||||
} else {
|
||||
return o.execCheckDeps(o.depsDeep())
|
||||
}
|
||||
}
|
||||
|
||||
func (o *alma) depsFast() []string {
|
||||
if o.getServerInfo().Mode.IsOffline() {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
// repoquery
|
||||
// `rpm -qa` shows dnf-utils as yum-utils on RHEL8, CentOS8, Alma8, Rocky8
|
||||
return []string{"yum-utils"}
|
||||
}
|
||||
|
||||
func (o *alma) depsFastRoot() []string {
|
||||
if o.getServerInfo().Mode.IsOffline() {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
// repoquery
|
||||
// `rpm -qa` shows dnf-utils as yum-utils on RHEL8, CentOS8, Alma8, Rocky8
|
||||
return []string{"yum-utils"}
|
||||
}
|
||||
|
||||
func (o *alma) depsDeep() []string {
|
||||
return o.depsFastRoot()
|
||||
}
|
||||
|
||||
func (o *alma) checkIfSudoNoPasswd() error {
|
||||
if o.getServerInfo().Mode.IsFast() {
|
||||
return o.execCheckIfSudoNoPasswd(o.sudoNoPasswdCmdsFast())
|
||||
} else if o.getServerInfo().Mode.IsFastRoot() {
|
||||
return o.execCheckIfSudoNoPasswd(o.sudoNoPasswdCmdsFastRoot())
|
||||
} else {
|
||||
return o.execCheckIfSudoNoPasswd(o.sudoNoPasswdCmdsDeep())
|
||||
}
|
||||
}
|
||||
|
||||
func (o *alma) sudoNoPasswdCmdsFast() []cmd {
|
||||
return []cmd{}
|
||||
}
|
||||
|
||||
func (o *alma) sudoNoPasswdCmdsFastRoot() []cmd {
|
||||
if !o.ServerInfo.IsContainer() {
|
||||
return []cmd{
|
||||
{"repoquery -h", exitStatusZero},
|
||||
{"needs-restarting", exitStatusZero},
|
||||
{"which which", exitStatusZero},
|
||||
{"stat /proc/1/exe", exitStatusZero},
|
||||
{"ls -l /proc/1/exe", exitStatusZero},
|
||||
{"cat /proc/1/maps", exitStatusZero},
|
||||
{"lsof -i -P -n", exitStatusZero},
|
||||
}
|
||||
}
|
||||
return []cmd{
|
||||
{"repoquery -h", exitStatusZero},
|
||||
{"needs-restarting", exitStatusZero},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *alma) sudoNoPasswdCmdsDeep() []cmd {
|
||||
return o.sudoNoPasswdCmdsFastRoot()
|
||||
}
|
||||
|
||||
type rootPrivAlma struct{}
|
||||
|
||||
func (o rootPrivAlma) repoquery() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (o rootPrivAlma) yumMakeCache() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (o rootPrivAlma) yumPS() bool {
|
||||
return false
|
||||
}
|
||||
@@ -84,7 +84,7 @@ func (o *amazon) sudoNoPasswdCmdsFastRoot() []cmd {
|
||||
{"stat /proc/1/exe", exitStatusZero},
|
||||
{"ls -l /proc/1/exe", exitStatusZero},
|
||||
{"cat /proc/1/maps", exitStatusZero},
|
||||
{"lsof -i -P", exitStatusZero},
|
||||
{"lsof -i -P -n", exitStatusZero},
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.