Compare commits

...

280 Commits

Author SHA1 Message Date
MaineK00n
bbb8fcbb42 refactor(amazon): version determination, parseInstalledPackagesLine 2023-08-01 18:25:45 +09:00
dependabot[bot]
a23abf48fd chore(deps): bump github.com/sirupsen/logrus from 1.9.0 to 1.9.3 (#1687)
Bumps [github.com/sirupsen/logrus](https://github.com/sirupsen/logrus) from 1.9.0 to 1.9.3.
- [Release notes](https://github.com/sirupsen/logrus/releases)
- [Changelog](https://github.com/sirupsen/logrus/blob/master/CHANGELOG.md)
- [Commits](https://github.com/sirupsen/logrus/compare/v1.9.0...v1.9.3)

---
updated-dependencies:
- dependency-name: github.com/sirupsen/logrus
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-22 21:47:35 +09:00
dependabot[bot]
6e14a2dee6 chore(deps): bump github.com/aws/aws-sdk-go from 1.44.263 to 1.44.300 (#1706)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.263 to 1.44.300.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.263...v1.44.300)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-22 21:47:19 +09:00
dependabot[bot]
e12fa0ba64 chore(deps): bump google.golang.org/grpc from 1.52.0 to 1.53.0 (#1699)
Bumps [google.golang.org/grpc](https://github.com/grpc/grpc-go) from 1.52.0 to 1.53.0.
- [Release notes](https://github.com/grpc/grpc-go/releases)
- [Commits](https://github.com/grpc/grpc-go/compare/v1.52.0...v1.53.0)

---
updated-dependencies:
- dependency-name: google.golang.org/grpc
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-22 21:42:06 +09:00
dependabot[bot]
fa5b875c34 chore(deps): bump github.com/BurntSushi/toml from 1.2.1 to 1.3.2 (#1692)
Bumps [github.com/BurntSushi/toml](https://github.com/BurntSushi/toml) from 1.2.1 to 1.3.2.
- [Release notes](https://github.com/BurntSushi/toml/releases)
- [Commits](https://github.com/BurntSushi/toml/compare/v1.2.1...v1.3.2)

---
updated-dependencies:
- dependency-name: github.com/BurntSushi/toml
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-22 21:38:04 +09:00
sadayuki-matsuno
f9276a7ea8 feat(windows) export DetectKBsFromKernelVersion (#1703) 2023-07-13 10:14:49 +09:00
MaineK00n
457a3a9627 feat(scanner/windows): update release info (#1696) 2023-06-29 14:05:10 +09:00
MaineK00n
4253550c99 chore(scanner): do not show logs when lsof: no Internet files located (#1688) 2023-06-23 16:08:49 +09:00
Atsushi Watanabe
97cf033ed6 feat(os): add Fedora 38 EOL date (#1689)
* feat: add Fedora 38 EOL date

* Update EOL date

based on https://fedorapeople.org/groups/schedule/f-38/f-38-key-tasks.html

* Fix test case name

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>

---------

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2023-06-13 17:23:11 +09:00
Kota Kanbe
5a6980436a feat(ubuntu): Support Ubuntu 14.04 and 16.04 ESM (#1682)
* feat(ubuntu): Support Ubuntu ESM

* Sort PackageFixStatuses to resolve the diff in integrationTest

* go mod update gost
2023-05-31 09:27:43 +09:00
Sinclair
6271ec522e fix(detector/github): Enhance the dependency graph API call on the big repository (#1681)
* fix: Reduce the number of data to be fetched per page, when retrying after a timeout failure on Dependency Graph API

* check rate limit on dependency graph API

* comment
2023-05-26 14:39:02 +09:00
dependabot[bot]
83681ad4f0 chore(deps): bump github.com/aws/aws-sdk-go from 1.44.259 to 1.44.263 (#1677)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.259 to 1.44.263.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.259...v1.44.263)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-18 10:17:34 +09:00
dependabot[bot]
779833872b chore(deps): bump golang.org/x/oauth2 from 0.7.0 to 0.8.0 (#1678)
Bumps [golang.org/x/oauth2](https://github.com/golang/oauth2) from 0.7.0 to 0.8.0.
- [Commits](https://github.com/golang/oauth2/compare/v0.7.0...v0.8.0)

---
updated-dependencies:
- dependency-name: golang.org/x/oauth2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-18 10:16:54 +09:00
Sinclair
5c79720f56 fix(detector/github): Dependency graph API touches fewer data per page than before (#1654)
* fix: Github dependency graph API touches fewer data per page than before

* fix: logging on Github API access failure

* fix: the previous errors persist upon retrying dependency graph
2023-05-15 19:41:04 +09:00
Wagde Zabit
b2c5b79672 feat(os): support debian 12 (#1676)
* feat(os): support debian 12

* chore(scanner/debian): remove unneeded warn log

---------

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2023-05-13 01:04:31 +09:00
dependabot[bot]
b0cc908b73 chore(deps): bump github.com/docker/distribution (#1675)
Bumps [github.com/docker/distribution](https://github.com/docker/distribution) from 2.8.1+incompatible to 2.8.2+incompatible.
- [Release notes](https://github.com/docker/distribution/releases)
- [Commits](https://github.com/docker/distribution/compare/v2.8.1...v2.8.2)

---
updated-dependencies:
- dependency-name: github.com/docker/distribution
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-12 08:56:46 +09:00
MaineK00n
ea3d8a6d0b test: sort []cveContent by CVEID (#1674) 2023-05-11 00:53:22 +09:00
MaineK00n
7475b27f6a chore(deps): update dictionary tools, Vuls is now CGO free (#1667)
* chore(deps): update dictionary tools, Vuls is now CGO free

* chore(integration): update commit
2023-05-11 00:28:51 +09:00
dependabot[bot]
ef80838ddd chore(deps): bump github.com/aws/aws-sdk-go from 1.44.254 to 1.44.259 (#1672)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.254 to 1.44.259.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.254...v1.44.259)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-09 08:09:49 +09:00
dependabot[bot]
b445b71ca5 chore(deps): bump golang.org/x/sync from 0.1.0 to 0.2.0 (#1673)
Bumps [golang.org/x/sync](https://github.com/golang/sync) from 0.1.0 to 0.2.0.
- [Commits](https://github.com/golang/sync/compare/v0.1.0...v0.2.0)

---
updated-dependencies:
- dependency-name: golang.org/x/sync
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-09 08:09:26 +09:00
dependabot[bot]
1ccc5f031a chore(deps): bump github.com/aws/aws-sdk-go from 1.44.251 to 1.44.254 (#1669)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.251 to 1.44.254.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.251...v1.44.254)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-05-02 12:17:14 +09:00
MaineK00n
8356e976c4 chore(deps): update goval-dictionary v0.8.3 (#1671) 2023-05-02 12:14:43 +09:00
MaineK00n
3cc7e92ce5 fix(saas): remove current directory part (#1666) 2023-04-27 12:09:34 +09:00
dependabot[bot]
046a29467b chore(deps): bump github.com/CycloneDX/cyclonedx-go from 0.7.0 to 0.7.1 (#1663)
Bumps [github.com/CycloneDX/cyclonedx-go](https://github.com/CycloneDX/cyclonedx-go) from 0.7.0 to 0.7.1.
- [Release notes](https://github.com/CycloneDX/cyclonedx-go/releases)
- [Changelog](https://github.com/CycloneDX/cyclonedx-go/blob/master/.goreleaser.yml)
- [Commits](https://github.com/CycloneDX/cyclonedx-go/compare/v0.7.0...v0.7.1)

---
updated-dependencies:
- dependency-name: github.com/CycloneDX/cyclonedx-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 05:54:41 +09:00
dependabot[bot]
ef5ab8eaf0 chore(deps): bump golang.org/x/oauth2 from 0.1.0 to 0.7.0 (#1662)
Bumps [golang.org/x/oauth2](https://github.com/golang/oauth2) from 0.1.0 to 0.7.0.
- [Release notes](https://github.com/golang/oauth2/releases)
- [Commits](https://github.com/golang/oauth2/compare/v0.1.0...v0.7.0)

---
updated-dependencies:
- dependency-name: golang.org/x/oauth2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 05:42:57 +09:00
dependabot[bot]
c8daa5c982 chore(deps): bump github.com/Ullaakut/nmap/v2 (#1665)
Bumps [github.com/Ullaakut/nmap/v2](https://github.com/Ullaakut/nmap) from 2.1.2-0.20210406060955-59a52fe80a4f to 2.2.2.
- [Release notes](https://github.com/Ullaakut/nmap/releases)
- [Commits](https://github.com/Ullaakut/nmap/commits/v2.2.2)

---
updated-dependencies:
- dependency-name: github.com/Ullaakut/nmap/v2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 05:41:49 +09:00
dependabot[bot]
9309081b3d chore(deps): bump github.com/aws/aws-sdk-go from 1.44.249 to 1.44.251 (#1660)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.249 to 1.44.251.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.249...v1.44.251)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 04:01:42 +09:00
dependabot[bot]
f541c32d1f chore(deps): bump github.com/c-robinson/iplib from 1.0.3 to 1.0.6 (#1659)
Bumps [github.com/c-robinson/iplib](https://github.com/c-robinson/iplib) from 1.0.3 to 1.0.6.
- [Release notes](https://github.com/c-robinson/iplib/releases)
- [Commits](https://github.com/c-robinson/iplib/compare/v1.0.3...v1.0.6)

---
updated-dependencies:
- dependency-name: github.com/c-robinson/iplib
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:51:34 +09:00
dependabot[bot]
79a8b62105 chore(deps): bump go.etcd.io/bbolt from 1.3.6 to 1.3.7 (#1657)
Bumps [go.etcd.io/bbolt](https://github.com/etcd-io/bbolt) from 1.3.6 to 1.3.7.
- [Release notes](https://github.com/etcd-io/bbolt/releases)
- [Commits](https://github.com/etcd-io/bbolt/compare/v1.3.6...v1.3.7)

---
updated-dependencies:
- dependency-name: go.etcd.io/bbolt
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:50:53 +09:00
dependabot[bot]
74c91a5a21 chore(deps): bump github.com/spf13/cobra from 1.6.1 to 1.7.0 (#1658)
Bumps [github.com/spf13/cobra](https://github.com/spf13/cobra) from 1.6.1 to 1.7.0.
- [Release notes](https://github.com/spf13/cobra/releases)
- [Commits](https://github.com/spf13/cobra/compare/v1.6.1...v1.7.0)

---
updated-dependencies:
- dependency-name: github.com/spf13/cobra
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:36:46 +09:00
MaineK00n
6787ab45c5 feat(ubuntu): add ubuntu 23.04 (#1647) 2023-04-27 03:26:59 +09:00
dependabot[bot]
f631e9e603 chore(deps): bump github.com/emersion/go-smtp from 0.14.0 to 0.16.0 (#1580)
Bumps [github.com/emersion/go-smtp](https://github.com/emersion/go-smtp) from 0.14.0 to 0.16.0.
- [Release notes](https://github.com/emersion/go-smtp/releases)
- [Commits](https://github.com/emersion/go-smtp/compare/v0.14.0...v0.16.0)

---
updated-dependencies:
- dependency-name: github.com/emersion/go-smtp
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:25:41 +09:00
dependabot[bot]
2ab48afe47 chore(deps): bump github.com/aws/aws-sdk-go from 1.44.136 to 1.44.249 (#1656)
Bumps [github.com/aws/aws-sdk-go](https://github.com/aws/aws-sdk-go) from 1.44.136 to 1.44.249.
- [Release notes](https://github.com/aws/aws-sdk-go/releases)
- [Commits](https://github.com/aws/aws-sdk-go/compare/v1.44.136...v1.44.249)

---
updated-dependencies:
- dependency-name: github.com/aws/aws-sdk-go
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:24:53 +09:00
dependabot[bot]
53ccd61687 chore(deps): bump github.com/Azure/azure-sdk-for-go (#1588)
Bumps [github.com/Azure/azure-sdk-for-go](https://github.com/Azure/azure-sdk-for-go) from 66.0.0+incompatible to 68.0.0+incompatible.
- [Release notes](https://github.com/Azure/azure-sdk-for-go/releases)
- [Changelog](https://github.com/Azure/azure-sdk-for-go/blob/main/documentation/release.md)
- [Commits](https://github.com/Azure/azure-sdk-for-go/compare/v66.0.0...v68.0.0)

---
updated-dependencies:
- dependency-name: github.com/Azure/azure-sdk-for-go
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-04-27 03:20:58 +09:00
Sinclair
b91a7b75e2 fix(detector/github): Github dependency graph API request will be retried on error (#1650)
* fix: Github dependency graph API request will be retried on error

* fix: github dependency graph: error handling

* github dependency graph: fix retry max
2023-04-24 12:46:29 +09:00
Wagde Zabit
333eae06ea fix order in identifying amazon linux version (#1652) 2023-04-21 10:35:19 +09:00
MaineK00n
93d401c70c chore(integration): update commit (#1649) 2023-04-20 14:09:21 +09:00
MaineK00n
99dc8e892f feat(gost/ubuntu): check kernel source package more strictly (#1599) 2023-04-20 13:05:41 +09:00
MaineK00n
fb904f0543 refactor(reporter): refactoring TelegramWriter, GoogleChatWriter (#1628)
* style: remove unnecessary line break

* style: use regexp.MatchString instead of regexp.Match

* refactor(reporter): refactoring TelegramWriter, GoogleChatWriter
2023-04-20 11:53:31 +09:00
MaineK00n
d4d33fc81d fix(scanner/dpkg): Fix false-negative in Debian and Ubuntu (#1646)
* fix(scanner/dpkg): fix dpkg-query and not remove src pkgs

* refactor(gost): remove unnecessary field and fix typo

* refactor(detector/debian): detect using only SrcPackage
2023-04-20 11:42:53 +09:00
Kota Kanbe
a1d3fbf66f fix(scan): false positives in Debian Pkg for CVE-IDs already detected by Trivy (#1639)
* fix(scan): false positives in Debian Pkg for CVE-IDs already detected by Trivy

* fix

* Add detectionMethod only when detected by gost
2023-04-17 09:21:30 +09:00
Sinclair
2cdfbe3bb4 fix: dependency graph using small query at once to avoid timeout (#1642) 2023-04-14 14:46:31 +09:00
MaineK00n
ac8290119d fix(configtest): amazon linux 2022, 2023 require dnf-utils (#1635) 2023-04-10 10:16:03 +09:00
MaineK00n
abdb081af7 feat(scanner): skip ssh config validation if G option is unknown option (#1632) 2023-04-04 18:50:17 +09:00
kurita0
e506125017 feat(wp): support csh, no sudo scan (#1523)
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2023-03-28 21:07:10 +09:00
MaineK00n
8ccaa8c3ef fix(scanner/windows): support installationType Domain Controller (#1627) 2023-03-28 21:04:17 +09:00
MaineK00n
de1ed8ecaa feat(ci): add windows for snmp2cpe (#1626) 2023-03-28 19:20:03 +09:00
MaineK00n
947d668452 feat(windows): support Windows (#1581)
* chore(deps): mod update

* fix(scanner): do not attach tty because there is no need to enter ssh password

* feat(windows): support Windows
2023-03-28 19:00:33 +09:00
MaineK00n
db21149f00 feat(contrib): add snmp2cpe (#1625) 2023-03-28 18:56:28 +09:00
dependabot[bot]
7f35f4e661 chore(deps): bump github.com/hashicorp/go-getter from 1.6.2 to 1.7.0 (#1606)
Bumps [github.com/hashicorp/go-getter](https://github.com/hashicorp/go-getter) from 1.6.2 to 1.7.0.
- [Release notes](https://github.com/hashicorp/go-getter/releases)
- [Changelog](https://github.com/hashicorp/go-getter/blob/main/.goreleaser.yml)
- [Commits](https://github.com/hashicorp/go-getter/compare/v1.6.2...v1.7.0)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/go-getter
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-03-17 05:04:48 +09:00
MaineK00n
6682232b5c feat(os): support Amazon Linux 2023 (#1621) 2023-03-16 17:31:57 +09:00
sadayuki-matsuno
984debe929 fix(detector/github) change timeout 10s to 10m (#1616) 2023-03-01 16:58:11 +09:00
Kota Kanbe
a528362663 fix(saas): upload JSON if err occurred during scan (#1615) 2023-03-01 14:52:03 +09:00
MaineK00n
ee97d98c39 feat: update EOL (#1598) 2023-02-22 16:00:05 +09:00
MaineK00n
4e486dae1d style: fix typo (#1592)
* style: fix typo

* style: add comment
2023-02-22 15:59:47 +09:00
MaineK00n
897fef24a3 feat(detector/exploitdb): mod update and add more urls (#1610) 2023-02-22 15:58:24 +09:00
MaineK00n
73f0adad95 fix: use GetCveContentTypes instead of NewCveContentType (#1603) 2023-02-21 11:56:26 +09:00
Sinclair
704492963c Revert: gost/Ubuntu.ConvertToModel() is public method now (#1597) 2023-02-08 11:36:36 +09:00
Sinclair
1927ed344c fix(report): tidy dependencies for multiple repo on integration with GSA (#1593)
* initialize dependencyGraphManifests out of loop

* remove GitHubSecurityAlert.PackageName

* tidy dependency map for multi repo

* set repo name into SBOM components & purl for multi repo
2023-02-07 19:47:32 +09:00
MaineK00n
ad2edbb844 fix(ubuntu): vulnerability detection for kernel package (#1591)
* fix(ubuntu): vulnerability detection for kernel package

* feat(gost/ubuntu): update mod to treat status: deferred as unfixed

* feat(ubuntu): support 22.10
2023-02-03 15:56:58 +09:00
MaineK00n
bfe0db77b4 feat(cwe): add cwe-id for category and view (#1578) 2023-01-20 18:02:07 +09:00
MaineK00n
ff3b9cdc16 fix: add comment (#1585) 2023-01-20 18:01:10 +09:00
Sinclair
2deb1b9d32 chore: update version for golangci-lint (#1586) 2023-01-20 18:00:54 +09:00
kl-sinclair
ca64d7fc31 feat(report): Include dependencies into scan result and cyclonedx for supply chain security on Integration with GitHub Security Alerts (#1584)
* feat(report): Enhance scan result and cyclonedx for supply chain security on Integration with GitHub Security Alerts

* derive ecosystem/version from dependency graph

* fix vars name && fetch manifest info on GSA && arrange ghpkgToPURL structure

* fix miscs

* typo in error message

* fix ecosystem equally to trivy

* miscs

* refactoring

* recursive dependency graph pagination

* change var name && update comments

* omit map type of ghpkgToPURL in signatures

* fix vars name

* goimports

* make fmt

* fix comment

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2023-01-20 15:32:36 +09:00
Brian Prodoehl
554ecc437e fix(report/email): add Critical to email summary (#1565)
* Add criticals to email summary

* chore(report/email): add Critical keys

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-12-20 11:56:07 +09:00
Kota Kanbe
f6cd4d9223 feat(libscan): support conan.lock C/C++ (#1572) 2022-12-20 11:22:36 +09:00
Kota Kanbe
03c59866d4 feat(libscan): support gradle.lockfile (#1568)
* feat(libscan): support gradle.lockfile

* add gradle.lockfile to integration test

* fix readme

* chore: update integration

* find *gradle.lockfile

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-12-20 08:52:45 +09:00
Kota Kanbe
1d97e91341 fix(libscan): delete map that keeps all file contents detected by FindLock to save memory (#1556)
* fix(libscan): delete Map that keeps all files detected by FindLock to save memory

* continue analyzing libs if err occurred

* FindLockDirs

* fix

* fix
2022-11-10 10:19:15 +09:00
MaineK00n
96333f38c9 chore(ubuntu): set Ubuntu 22.10 EOL (#1552) 2022-11-01 14:00:56 +09:00
MaineK00n
8b5d1c8e92 feat(cwe, cti): update dictionary (#1553)
* feat(cwe): update CWE dictionary

* feat(cti): update CTI dictionary

* fix(cwe): fix typo
2022-11-01 14:00:23 +09:00
MaineK00n
dea80f860c feat(report): add cyclonedx format (#1543) 2022-11-01 13:58:31 +09:00
dependabot[bot]
6eb4c5a5fe chore(deps): bump github.com/aquasecurity/trivy from 0.31.3 to 0.32.1 (#1538)
* chore(deps): bump github.com/aquasecurity/trivy from 0.31.3 to 0.32.1

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.31.3 to 0.32.1.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.31.3...v0.32.1)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump github.com/aquasecurity/trivy 0.32.1 to 0.33.0

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-10-27 01:24:06 +09:00
Kota Kanbe
b219a8495e fix(cpescan): match if affected version is NA (#1548)
https://github.com/vulsio/go-cve-dictionary/pull/283
2022-10-19 16:57:32 +09:00
Kota Kanbe
eb87d5d4e1 fix(saas): panic: runtime error: comparing uncomparable type config.PortScanConf (#1537) 2022-10-04 11:55:48 +09:00
tomofumi0003
6963442a5e fix(report): send report to each slack channel (#1530)
* fix send report to each slack channel

* fix(report): use w.Cnf.Channel instead of channel

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-09-29 16:08:36 +09:00
Kota Kanbe
f7299b9dba fix(scan): detect AL2 even when empty /etc/redhat-release (#1536) 2022-09-29 11:12:30 +09:00
Satoru Nihei
379fc8a1a1 fix: fix query (#1534) 2022-09-28 20:51:20 +09:00
MaineK00n
947fbbb29e fix(ms): always sets isPkgCvesDetactable to true (#1492) 2022-09-07 12:05:16 +09:00
MaineK00n
06d2032c9c docs: update slack invite URL (#1524) 2022-09-07 12:04:28 +09:00
dependabot[bot]
d055c48827 chore(deps): bump github.com/aquasecurity/trivy from 0.30.4 to 0.31.3 (#1526)
Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.30.4 to 0.31.3.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.30.4...v0.31.3)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-09-07 12:02:08 +09:00
MaineK00n
2a00339da1 fix(lockfiles): fix privileges in lockfile scan (#1512)
* fix(lockfiles): fix privileges in lockfile scan

* style(fmt): add space in comment line
2022-09-02 18:18:00 +09:00
kidokidofire
2d959b3af8 Fix func to get EC2 instance ID by IMDSv2. (#1522)
Co-authored-by: kido3160 <s.kido.fy@future.co.jp>
2022-08-25 14:31:48 +09:00
kidokidofire
595e26db41 Enable to get EC2 instance ID by IMDSv2. (#1520)
Co-authored-by: kido3160 <s.kido.fy@future.co.jp>
2022-08-24 17:39:45 +09:00
Kota Kanbe
1e457320c5 chore: bump up version (#1511) 2022-08-08 16:55:31 +09:00
MaineK00n
a06e689502 feat(cwe): add cwe top25 2022 (#1504) 2022-08-04 18:00:45 +09:00
MaineK00n
ca3f6b1dbf feat(amazon): support Amazon Linux 2 Extra Repository (#1510)
* feat(amazon): support Amazon Linux 2 Extra Repository

* feat(amazon): set Amazon Linux EOL

* feat(oracle): set Oracle Linux EOL
2022-08-04 17:52:42 +09:00
dependabot[bot]
f1c78e42a2 chore(deps): bump github.com/aquasecurity/trivy from 0.30.3 to 0.30.4 (#1507)
Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.30.3 to 0.30.4.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.30.3...v0.30.4)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-08-03 09:53:08 +09:00
MaineK00n
2f3b8bf3cc chore(rocky): set Rocky Linux 9 EOL (#1495) 2022-07-27 02:48:10 +09:00
MaineK00n
ab54266f9e fix(library): fill libraryFixedIns{}.key in ftypes.Pnpm and ftypes.DotNetCore (#1498)
* fix(library): fill key in ftypes.Pnpm and ftypes.DotNetCore

* chore(library): change the data structure of LibraryMap
2022-07-26 13:53:50 +09:00
dependabot[bot]
d79d138440 chore(deps): bump github.com/aquasecurity/trivy from 0.30.2 to 0.30.3 (#1499)
Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.30.2 to 0.30.3.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.30.2...v0.30.3)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-07-26 04:52:32 +09:00
dependabot[bot]
139f3a81b6 chore(deps): bump github.com/aquasecurity/trivy from 0.27.1 to 0.30.0 (#1494)
* chore(deps): bump github.com/aquasecurity/trivy from 0.27.1 to 0.30.0

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.27.1 to 0.30.0.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.27.1...v0.30.0)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump github.com/aquasecurity/trivy from 0.30.0 to 0.30.2

* fix(library): change fanal to trivy/pkg/fanal

* chore: update integration

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-07-25 16:47:57 +09:00
MaineK00n
d1a617cfff fix(ms): remove duplicate advisories (#1490) 2022-07-14 09:26:30 +09:00
MaineK00n
48f7597bcf feat(ms): import gost:MaineK00n/new-windows (#1481)
* feat(ms): import gost:MaineK00n/new-windows

* chore(discover): add CTI section

* feat(ms): fill KB with VulnInfo.DistroAdvisories instead of CveContent.Optional

* fix(ms): Change bitSize from 32 to 64

* fix(ms): delete KB prefix

* chore(ms): change logger

* fix(ms): fill in correct AdvisoryID

Co-authored-by: Sadayuki Matsuno <sadayuki.matsuno@gmail.com>
2022-07-04 14:26:41 +09:00
sadayuki-matsuno
93731311a1 feat(saas) add vuls tags from env (#1487) 2022-07-04 12:00:02 +09:00
MaineK00n
999529a05b feat(scanner): detect host key change (#1406)
* feat(scanner): detect host key change

* chore(scanner): add testcase
2022-07-04 10:57:43 +09:00
MaineK00n
847d820af7 feat(os): support Alpine Linux 3.16 (#1479) 2022-06-15 17:08:40 +09:00
MaineK00n
5234306ded feat(cti): add Cyber Threat Intelligence info (#1442)
* feat(cti): add Cyber Threat Intelligence info

* chore: replace io/ioutil as it is deprecated

* chore: remove --format-csv in stdout writer

* chore(deps): go get go-cti@v0.0.1

* feat(cti): update cti dict(support MITRE ATT&CK v11.1)

* chore(deps): go get go-cti@master
2022-06-15 17:08:12 +09:00
MaineK00n
86b60e1478 feat(config): support CIDR (#1415) 2022-06-10 18:24:25 +09:00
MaineK00n
42fdc08933 feat(os): support RHEL 9, CentOS Stream 9, Alma Linux 9 (#1465)
* feat(os): support RHEL 9

* feat(os): support CentOS Stream9, AlmaLinux 9
2022-06-09 06:39:16 +09:00
MaineK00n
38b1d622f6 feat(cwe): update CWE dictionary (#1443) 2022-06-09 06:36:54 +09:00
MaineK00n
2477f9a8f8 chore: tidy go.mod, add arm64 and workflows update (#1461)
* chore: tidy go.mod

* chore(gh): add arm64 and workflows update

* chore: disable staticcheck SA1019 for xerrors.Errorf

* chore: fix github.com/boltdb/bolt switch to github.com/etcd-io/bbolt? #1457
2022-06-09 06:10:07 +09:00
kurita0
ec6e90acd3 fix getting wp core version string via ssh (#1344)
* fix getting wp core version string via ssh

* check DocRoot
2022-06-09 06:05:15 +09:00
sadayuki-matsuno
2aca2e4352 feat(contrib/trivy) fill image info into scan results (#1475)
* feat(contrib/trivy) fill image info into scan results

* fix match size

* fix match size
2022-06-08 17:00:32 +09:00
sadayuki-matsuno
14518d925e fix(contrib/fvuls) initialize optional map (#1469) 2022-05-30 12:46:53 +09:00
sadayuki-matsuno
948f8c0751 add VULS_TAGS env into contrib future-vuls (#1466) 2022-05-24 13:46:28 +09:00
sadayuki-matsuno
1c1e40058e feat(library) output library type when err (#1460) 2022-05-16 09:58:58 +09:00
Satoru Nihei
2158fc6cb1 fix: judge by scannedVia (#1456) 2022-05-06 09:38:38 +09:00
MaineK00n
91ed318c5d chore(deps): update trivy v0.27.1 (#1453)
* chore(deps): update trivy v0.27.1

* chore: add gosum
2022-04-27 15:43:23 +09:00
MaineK00n
bfc3828ce1 chore(deps): update goval-dictionary and gost (#1452) 2022-04-27 13:03:11 +09:00
dependabot[bot]
c7eac4e7fe chore(deps): bump github.com/aquasecurity/trivy from 0.25.4 to 0.27.0 (#1451)
* chore(deps): bump github.com/aquasecurity/trivy from 0.25.4 to 0.27.0

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.25.4 to 0.27.0.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.25.4...v0.27.0)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* fix(library): support go.mod scan

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-04-27 12:46:47 +09:00
MaineK00n
cc63a0eccf feat(ubuntu): add Jammy Jellyfish(22.04) (#1431)
* feat(ubuntu): add Jammy Jellyfish(22.04)

* chore(deps): gost update

* chore(oval/ubuntu): fill kernel package name temporarily
2022-04-27 11:04:00 +09:00
Satoru Nihei
fd18df1dd4 feat: parse OS version from result of trivy-scan (#1444)
* chore(deps): bump github.com/aquasecurity/trivy from 0.24.2 to 0.25.4

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.24.2 to 0.25.4.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.24.2...v0.25.4)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* test: add testcase

* feat: parse metadata

* refactor: change detect logic

* refactor: change parsing logic

* refactor: refactor check logic before detect

* fix: impl without reuseScannedCves

* feat: complement :latest tag

* Update contrib/trivy/parser/v2/parser.go

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-04-27 10:28:20 +09:00
MaineK00n
8775b5efdf chore: fix lint error (#1438)
* chore: fix lint: revive error

* chore: golangci-lint uses go 1.18

* chore: refactor tasks in GNUmakefile

* chore: add trivy binary in fvuls image
2022-04-15 18:12:13 +09:00
dependabot[bot]
a9f29a6c5d chore(deps): bump github.com/aquasecurity/trivy from 0.24.2 to 0.25.1 (#1436)
* chore(deps): bump github.com/aquasecurity/trivy from 0.24.2 to 0.25.0

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.24.2 to 0.25.0.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.24.2...v0.25.0)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump up Go to 1.18 and trivy v0.25.1

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-04-05 13:27:49 +09:00
Satoru Nihei
05fdde48f9 feat: support server scan for suse with text/plain (#1433) 2022-04-04 12:45:44 +09:00
MaineK00n
3dfbd6b616 chore(mod): update go-exploitdb module (#1428)
* chore(mod): update go-exploitdb module

* docs: add inthewild datasource

* Unique because URLs sometimes duplicate on GitHub and InTheWild

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2022-03-26 05:26:06 +09:00
MaineK00n
04f246cf8b chore: add fvuls image (#1426) 2022-03-25 06:17:33 +09:00
MaineK00n
7500f41655 chore(mod): update go-kev module (#1425) 2022-03-25 06:15:06 +09:00
MaineK00n
a1cc152e81 feat(library): add auto detect library (#1417) 2022-03-17 18:08:40 +09:00
Masato Yagi
1c77bc1ba3 feat: replace NVD-column with packages-column at output of report (#1414)
* replace NVD-col with packages-col

* fix typo

* set table row line
2022-03-17 17:14:41 +09:00
Satoru Nihei
ec31c54caf chore: update trivy from 0.23.0 to 0.24.2 (#1407)
* chore: update trivy from 0.23.0 to 0.24.2

* chore: deal with changing structs

see: 11f4f81123
2022-03-04 16:00:08 +09:00
Satoru Nihei
2f05864813 fix: handling when image contains no trivy-target (#1405)
* fix: handling when image contains no trivy-target

* refactor: use scanResult.Optional

* fix: add supported list to error message
2022-03-02 06:13:26 +09:00
Kota Kanbe
2fbc0a001e fix: nil pointer when no match for any OS (#1401)
* refactor: rename serverapi.go to scanner.go

* fix: nil pointer if no match for any OS
2022-02-24 07:58:29 +09:00
MaineK00n
7d8a24ee1a refactor(detector): standardize db.NewDB to db.CloseDB (#1380)
* feat(subcmds/report,server): read environment variables when configPath is ""

* refactor: standardize db.NewDB to db.CloseDB

* chore: clean up import

* chore: error wrap

* chore: update goval-dictionary

* fix(oval): return Pseudo instead of nil for client

* chore: fix comment

* fix: lint error
2022-02-19 09:20:45 +09:00
MaineK00n
7750347010 fix(oval/suse): use def.Advisory.Cves[0].CveID instead of def.Title (#1397) 2022-02-17 19:16:14 +09:00
MaineK00n
9bcffcd721 fix(configtest,scan): fix validateSSHConfig (#1395)
* fix(configtest,scan): support StrictHostKeyChecking no

* fix(configtest,scan): support ServerTypePseudo

* fix(configtest,scan): skip if using proxy
2022-02-17 08:15:23 +09:00
MaineK00n
787604de6a fix(suse): fix openSUSE, openSUSE Leap, SLES, SLED scan (#1384)
* fix(suse): fix openSUSE, openSUSE Leap scan

* docs: update README

* fix: unknown CveContent.Type

* fix: tui reporting

* fix: listening port was duplicated in format-full-text

* fix .gitignore

* fix: add EOL data for SLES12.5

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2022-02-15 17:11:54 +09:00
MaineK00n
5164fb1423 fix(util): Major() behavior for major version (#1393) 2022-02-15 07:59:29 +09:00
MaineK00n
07335617d3 fix(configtest,scan): support SSH config file (#1388)
* fix(configtest,scan): support SSH config file

* chore(subcmds): remove askKeyPassword flag
2022-02-12 21:50:56 +09:00
MaineK00n
e5855922c1 fix(redhat): detect RedHat version (#1387)
* fix(redhat): detect RedHat version

* fix err fmt string

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2022-02-12 20:09:51 +09:00
MaineK00n
671be3f2f7 feat(configtest,scan): detect known_hosts error (#1386) 2022-02-11 12:54:17 +09:00
MaineK00n
fe8d252c51 feat(debian): validate running kernel version (#1382)
* feat(debian): validate running kernel version

* chore(gost/debian): only stash when there is linux package
2022-02-11 12:36:48 +09:00
MaineK00n
0cdc7a3af5 chore(oval): update mod (#1385) 2022-02-09 10:20:07 +09:00
maito1201
1cfe155a3a feat(fedora): support fedora (#1367)
* feat(fedora): support fedora

* fix(fedora): fix modular package scan

* fix(fedora): check needs-restarting, oval arch, add source link

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-02-09 09:30:44 +09:00
MaineK00n
2923cbc645 fix(centos): identify CentOS and CentOS Stream (#1360) 2022-02-03 05:32:03 +09:00
MaineK00n
7c209cc9dc fix(gost): add nil check (#1379) 2022-02-03 05:25:11 +09:00
MaineK00n
84fa4ce432 feat(alpine): add Alpine 3.14, 3.15 EOL (#1359)
* feat(alpine): add Alpine 3.14, 3.15 EOL

* fix(alpine): change test case
2022-02-02 06:46:52 +09:00
MaineK00n
f2e9cd9668 fix(oval): fix query in PostgreSQL (#1372)
Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2022-02-02 06:46:02 +09:00
Kota Kanbe
77049d6cbb feat(libscan): support trivy v0.23.0 (#1377)
* feat(libscan): support trivy v0.23.0

* fix lint err

* review
2022-02-01 10:40:16 +09:00
sadayuki-matsuno
b4c23c158b fix(scanner/base) export libFile fields (#1366) 2022-01-18 11:56:12 +09:00
sadayuki-matsuno
964b4aa389 fix(scanner/base) export libFile (#1365) 2022-01-18 11:31:36 +09:00
Kota Kanbe
dc5aa35db7 chore: update git submodule for integration test (#1364) 2022-01-18 10:22:00 +09:00
dependabot[bot]
43c05d06fc chore(deps): bump github.com/aquasecurity/trivy from 0.20.0 to 0.22.0 (#1350)
* chore(deps): bump github.com/aquasecurity/trivy from 0.20.0 to 0.22.0

Bumps [github.com/aquasecurity/trivy](https://github.com/aquasecurity/trivy) from 0.20.0 to 0.22.0.
- [Release notes](https://github.com/aquasecurity/trivy/releases)
- [Changelog](https://github.com/aquasecurity/trivy/blob/main/goreleaser.yml)
- [Commits](https://github.com/aquasecurity/trivy/compare/v0.20.0...v0.22.0)

---
updated-dependencies:
- dependency-name: github.com/aquasecurity/trivy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* fix(library): trivy scan

* chore(integration): add lockfiles

* fix(library): support gobinary scan via trivy

* chore: add pom in IsTrivySupportedLib

* chore: fix LIBS

* fix(library): support trivy offline scan

* chore(integration): move vulsio/integration repository

* chore(integration): add integration as git submodule

* chore: update .gitignore

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2022-01-18 08:27:11 +09:00
Kota Kanbe
a3f7d1d7e7 feat(go-kev): update go-kev deps (#1352) 2022-01-07 08:21:48 +09:00
Kota Kanbe
bb4a1ca6c2 GPLv3 (#1351) 2021-12-26 09:08:38 +09:00
Kota Kanbe
57cce640e1 Create SECURITY.md 2021-12-26 08:51:44 +09:00
kurita0
1eb5d36668 fix configtest stalled with scanMode=fast-root (#1339)
* fix configtest stalled with scanMode=fast-root

* repoquery does not require sudo privileges on centos
2021-12-26 08:31:11 +09:00
MaineK00n
6bc4850596 fix(detector/ospkg): Skip OVAL/gost search when the number of packages is 0 (#1343)
* fix(detector/ospkg): Skip OVAL/gost search when the number of packages is 0

* chore: easy refactoring
2021-12-26 07:53:18 +09:00
MaineK00n
24005ae7ae chore(GHActions): replace with dependabot (#1348)
* chore(GHActions): replace with dependabot

* chore(GHActions): remove tidy.yml due to deprecation
2021-12-26 07:48:11 +09:00
MaineK00n
7aa296bb57 fix(oval): fix RDB query (#1347) 2021-12-26 07:47:52 +09:00
Kota Kanbe
3829ed2f8e Fix the parsing logic of FreeBSD pkg-audit (#1334)
* fix scanUnsecurePackages for FreeBSD pkg audit output change

* Add test case TestParseBlock for FreeBSD pkg audit output change

* Fix for no CVE in a block

* fix(scan): parse logic of pkg-audit

* fix

ca761fb218

Co-authored-by: User Kurita <kurita@vuls0.digitiminimi.com>
2021-12-24 10:27:38 +09:00
MaineK00n
2b7294a504 feat(amazon): support amazon linux 2022 (#1338) 2021-12-09 11:06:44 +09:00
MaineK00n
0c6a892893 style: fix lint (#1335) 2021-11-19 15:46:51 +09:00
MaineK00n
89d94ad85a feat(detector): add known exploited vulnerabilities (#1331)
* feat(kevuln): add known exploited vulnerabilities

* chore: transfer repository owner

* feat: show CISA on top of CERT

* chore: rename var

* chore: rename var

* chore: fix review

* chore: fix message
2021-11-19 15:06:17 +09:00
sadayuki-matsuno
ffdb78962f update dictionaries (#1326) 2021-10-29 11:24:49 +09:00
Kota Kanbe
321dae37ce chore: update readme 2021-10-24 17:38:57 +09:00
Kota Kanbe
a31797af0b Merge branch 'sakura' 2021-10-24 17:33:48 +09:00
Kota Kanbe
32999cf432 chore: update readme 2021-10-24 17:32:35 +09:00
Kota Kanbe
88218f5d92 chore: update sponsor (#1325) 2021-10-24 17:25:03 +09:00
Kota Kanbe
15761933ac chore: update sponsor 2021-10-24 17:01:35 +09:00
Kota Kanbe
0b62842f0e chore: fix go-sqlite3 deps (#1324) 2021-10-20 12:33:59 +09:00
Kota Kanbe
6bceddeeda chore: update goval-dictionary (#1323)
* chore: update goval-dictionary

* fix errs
2021-10-20 11:10:33 +09:00
Kota Kanbe
2dcbff8cd5 chore: sponsor (#1321)
* fix readme

* chore: fix lint
2021-10-17 16:41:51 +09:00
Kota Kanbe
8659668177 fix(cpescan): bug in NvdVendorProductMatch (#1320)
* fix(cpescan): bug in NvdVendorProductMatch

* update go mod
2021-10-13 12:55:01 +09:00
Kota Kanbe
e07b6a9160 feat(report): show Amazon ALAS link to report (#1318) 2021-10-12 09:09:58 +09:00
Kota Kanbe
aac5ef1438 feat: update-trivy (#1316)
* feat: update-trivy

* add v2 parser

* implement v2

* refactor

* feat: add show version to future-vuls

* add test case for v2

* trivy v0.20.0

* support --list-all-pkgs

* fix lint err

* add test case for jar

* add a test case for gemspec in container

* remove v1 parser and change Library struct

* Changed the field name in the model struct LibraryScanner

* add comment

* fix comment

* fix comment

* chore

* add struct tag
2021-10-08 17:22:06 +09:00
sadayuki-matsuno
d780a73297 add log json option (#1317) 2021-10-07 16:00:01 +09:00
Kota Kanbe
9ef8cee36e refactor(exploitdb): use pipeline effectively (#1314)
https://github.com/vulsio/go-exploitdb/pull/64
2021-10-01 09:10:49 +09:00
Kota Kanbe
77808a2c05 feat(go-cve): add error handling (#1313) 2021-09-30 12:42:43 +09:00
MaineK00n
177e553d12 feat(go-exploitdb): add error handling (#1310)
* feat(go-exploitdb): add error handling

* chore: rename

* go get -u go-exploitdb

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-09-30 11:33:18 +09:00
MaineK00n
40f8272a28 feat(go-msfdb): add error handling and support http mode (#1308)
* feat(go-msfdb): add error handling

* feat(go-msfdb): support http mode

* go get -u go-msfdb

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-09-30 11:16:41 +09:00
MaineK00n
a7eb1141ae feat(gost): add error handling (#1311)
* feat(gost): add error handling

* go get -u gost

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-09-30 10:51:41 +09:00
Kota Kanbe
c73ed7f32f chore: update find-lock file type (#1309) 2021-09-24 16:23:23 +09:00
Kota Kanbe
f047a6fe0c breaking-change: Update vuls-dictionaries (#1307)
* chore: update dictionaries

* update gost

* chore: update gost

* chore(go-cve-dict): use v0.8.1

* chore: change linter from golint to revive

* chore(linter): set revive config

* chore: fix commands and update golangci-lint version

* fix: lint errs

* chore: update gost

Co-authored-by: MaineK00n <mainek00n.1229@gmail.com>
2021-09-21 05:10:29 +09:00
MaineK00n
7f15a86d6a chore: change repository owner (#1306) 2021-09-16 11:05:37 +09:00
Kota Kanbe
da1e515253 breaking-change(goval): change-redis-architecture (#1305)
https://github.com/kotakanbe/goval-dictionary/pull/145
2021-09-15 08:25:14 +09:00
MaineK00n
591786fde6 feat(oval): support new goval-dictionary model (#1280)
* feat(oval): support new goval-dictionary model

* chore: fix lint err

* chore: set len of slice to 0

* fix(oval): avoid contamination of AffectedPackages by writing directly to defPacks

* fix(oval): avoid contamination of AffectedPackages by writing directly to defPacks

* feat(report): do not add duplicate CveContent

* chore: goval-dictionary update

* chore: go mod tidy

* fix(oval): preload Advisory.Cves for Ubuntu

https://github.com/kotakanbe/goval-dictionary/pull/152

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-09-13 10:19:59 +09:00
Kota Kanbe
47e6ea249d chore: fix lint warning (#1301) 2021-09-12 20:35:56 +09:00
Kota Kanbe
4a72295de7 feat(saas): support for library-only scanning (#1300) 2021-09-10 15:38:35 +09:00
MaineK00n
9ed5f2cac5 feat(debian): support Debian 11(bullseye) (#1298)
* feat(debian): support bullseye

* fix(debian): fix test case
2021-09-08 10:47:34 +09:00
Kota Kanbe
3e67f04fe4 breaking-change(cpescan): Improve Cpe scan (#1290)
* chore(cpescan): enable to pass useJvn to detector.DetectCpeURIsCves()

* review comment

* chore: go mod update go-cve

* feat(cpescan): set JvnVendorProductMatch to confidence If detected by JVN

* add NvdExactVersionMatch and NvdRoughVersionMatch

* add confidence-over option to report

* sort CveContents

* fix integration-test
2021-09-07 16:18:59 +09:00
Kota Kanbe
b9416ae062 fix(report): too many SQL variables (#1296)
* fix(report): too many SQL variables

https://github.com/kotakanbe/go-cve-dictionary/pull/210

* fix lint err
2021-09-01 10:42:19 +09:00
otuki
b4e49e093e feat(GAdocker): Publish docker image with Github Actions (#1291)
* feat(GAdocker): publish docker image with Github Actions

* feat(master): publish Docker image with GHActions:

* feat(docker): publish docker image with GHActions

* feat(master): remove unnecessary GHActions

* feat(master): remove unnecessary GHActions

* feat(master): Add user ID and password at Docker GHActions

* feat(master): Add user ID and password with docker/login
2021-09-01 08:44:55 +09:00
Kota Kanbe
020f6ac609 fix(scan): warning if err occurred while scanning ports (#1294)
[Aug 26 20:59:11] ERROR [localhost] Error on host, err: [Failed to scan Ports:
    github.com/future-architect/vuls/scanner.Scanner.getScanResults.func1
        /go/src/github.com/future-architect/vuls/scanner/serverapi.go:658
  - dial tcp 172.19.0.1:80: connect: no route to host]

Scan Summary
================
host    Error           Use configtest subcommand or scan with --debug to view the details

[Aug 26 20:59:11] ERROR [localhost] Failed to scan: Failed to scan. err:
    github.com/future-architect/vuls/scanner.Scanner.Scan
        /go/src/github.com/future-architect/vuls/scanner/serverapi.go:103
  - An error occurred on [host]
2021-08-27 06:20:50 +09:00
sadayuki-matsuno
7e71cbdd46 fix(gost) sort in ms converter (#1293) 2021-08-26 14:32:45 +09:00
Kota Kanbe
1003f62212 chore: update go-cve-dictionary (#1292) 2021-08-26 13:45:40 +09:00
Kota Kanbe
9b18e1f9f0 breaking-change(go-exploitdb): support new go-exploitdb (#1288) 2021-08-20 08:00:57 +09:00
Kota Kanbe
24f790f474 feat(go-cve): update go-cve-dictionary (#1287)
diff: a31a3152c1...5043255
2021-08-19 05:34:03 +09:00
MaineK00n
fb8749fc5e fix(cpescan): fix confidence in cpe uri scan (#1286)
* fix(cpescan): fix confidence in cpe uri scan

* feat(cpe): add NA case

* chore: use HasNvd, HasJvn instead of len

* chore: go-cve-dictionary update
2021-08-19 04:59:09 +09:00
MaineK00n
96c3592db1 breaking-change(go-cve-dict): support new go-cve-dictionary (#1277)
* feat(model): change CveContents(map[string]CveContent) to map[string][]CveContent

* fix(cpescan): use CveIDSource

* chore: check Nvd, Jvn data

* chore: go-cve-dictionary update

* chore: add to cveDetails as is, since CveID is embedded in the response
2021-08-13 18:00:55 +09:00
Kota Kanbe
d65421cf46 fix(cpescan): JVN scan False-Negative on RDB-backend (#1283)
https://github.com/kotakanbe/go-cve-dictionary/pull/199
2021-08-13 09:58:04 +09:00
Kota Kanbe
c52ba448cd chore: update readme (#1282) 2021-08-12 09:37:45 +09:00
Kota Kanbe
21adce463b update readme 2021-08-12 09:31:12 +09:00
MaineK00n
f24240bf90 feat(library): update trivy v0.19.2 (#1278) 2021-08-02 05:40:57 +09:00
kazuminn
ff83cadd6e feat(os) : support Alma Linux (#1261)
* support Alma Linux

* fix miss

* feat(os) : support Rocky linux  (#1260)

* support rocky linux scan

* fix miss

* lint

* fix : like #1266 and error Failed to parse CentOS

* pass make test

* fix miss

* fix pointed out with comment

* fix golangci-lint error
2021-08-02 04:36:43 +09:00
Phil
e8c09282d9 Update ubuntu.go (#1279)
URI correction for ubuntu; see gost project: https://github.com/knqyf263/gost/blob/master/server/server.go#L48
2021-08-02 04:25:51 +09:00
Kota Kanbe
5f4d68cde4 feat(go-msf): update deps (#1275)
https://github.com/takuzoo3868/go-msfdb/pull/22
2021-07-21 09:13:34 +09:00
Kota Kanbe
9077a83ea8 fix(docker): docker build error (#1274) 2021-07-20 05:31:05 +09:00
Kota Kanbe
543dc99ecd fix(cpescan): CpeVendorProductMatch not set when Redis Backend (#1273)
* fix(cpescan): CpeVendorProductMatch not set when Redis Backend

* fix(integration): deprecated CPE URI

* fix(integration-test): add a test case for CpeVendorProductMatch

* fix review

* update deps go-cve-dict v0.6.2
2021-07-19 08:43:58 +09:00
Kota Kanbe
f0b3a8b1db feat(cpescan): Use JVN as a second DB for CPE scan (#1268)
* feat(cpescan): Use JVN as a second DB for CPE scan

* feat(tui): display score of detectionmethod

* update go.mod
2021-07-08 12:39:46 +09:00
Norihiro NAKAOKA
0b9ec05181 Support scanning Ubuntu using Gost (#1243)
* chore: add vuls binary in gitignore

* feat(gost): support ubuntu

* chore(debian): fix typo

* feat(ubuntu): more detail on CveContent

* chore: update .gitignore

* chore: update gost deps

* feat(ubuntu): add test in gost/ubuntu

* chore: fix typo

* Revert "chore: fix typo"

This reverts commit 9f2f1db233.

* docs: update README
2021-07-08 08:31:46 +09:00
Norihiro NAKAOKA
0bf12412d6 fix(rocky): fix Scan in Rocky Linux (#1266)
* fix(rocky): fix OVAL scan in Rocky Linux

* chore: add FreeBSD13 EOL, fix #1245

* chore(rocky): add Rocky Linux EOL tests

* feat(rocky): implement with reference to CentOS

* feat(raspbian): add Raspbian to Server mode

* feat(rocky): support gost scan

* fix(rocky): rocky support lessThan

* chore: update doc and comment
2021-07-08 05:39:48 +09:00
Peter Sedgewick
0ea4d58c63 fix(gost): Use DBDriver ctx in Pseudo (#1264) 2021-07-02 06:18:44 +09:00
kazuminn
5755b00576 feat(os) : support Rocky linux (#1260)
* support rocky linux scan

* fix miss

* lint
2021-07-02 05:35:47 +09:00
Shigechika AIKAWA
1c8e074c9d Feat report googlechat (#1257) (#1258)
* feat: Support Ubuntu21

* feat(report): Send report via Google Chat

* feat(report): Send report via Google Chat

* Snip too long message as (The rest is omitted).

* sorry for mixed feat-ubuntu21 branch. excluded it

* append diff, attack vector and exploits info

* add ServerName filter by regexp

* rename variables and rewrite validators

* fix renaming miss

* fix renaming miss, again
2021-07-02 05:32:00 +09:00
Shigechika AIKAWA
0e0e5ce4be feat: Support Ubuntu21 (#1231) 2021-06-28 10:28:54 +09:00
Kota Kanbe
23dfe53885 chore: update go-exploitdb (#1262) 2021-06-28 08:29:16 +09:00
Norihiro NAKAOKA
8e6351a9e4 feat(oval): goval-dictionary update (#1259)
* feat(oval): err check for GetLastModified

* feat(oval): goval-dictionary update
2021-06-25 14:08:50 +09:00
Shigechika AIKAWA
3086e2760f fix Ubuntu 20.10 End of Life on July 22 2021 (#1256) 2021-06-23 08:14:38 +09:00
Norihiro NAKAOKA
b8db2e0b74 feat(report): Change the priority of CVE information in Debian (#1202)
* fix (bug) : using ScanResults refs #1019

* feat(gost): WIP change priority of CVE Info in Debian

* feat(report): change priority of CVE Info in Debian

* refactor: move RemoveRaspbianPackFromResult

* style: remove comment

* fix: lint error

* style: change coding style

* feat(report): support reporting with gost alone

* fix: merge error

* refactor(debian): change code to be simple
2021-06-21 15:14:41 +09:00
Kota Kanbe
43b46cb324 chore: add test data for integration test (#1254) 2021-06-17 14:01:10 +09:00
Kota Kanbe
d0559c7719 chore: update gost deps (#1253) 2021-06-16 18:45:48 +09:00
Kota Kanbe
231c63cf62 fix(libscan): support empty LibraryFixedIn (#1252) 2021-06-16 13:28:12 +09:00
Kota Kanbe
2a9aebe059 fix(report): improve cpe match logic (#1251)
* fix(report): improve cpe match logic

https://github.com/kotakanbe/go-cve-dictionary/pull/189

* fix vet error
2021-06-11 14:39:41 +09:00
Kota Kanbe
4e535d792f chore: fix build-tags in .goreleaser.yml (#1250) 2021-06-09 09:49:26 +09:00
Kota Kanbe
4b487503d4 chore: add go.sum test data for integration test (#1249)
* add go.sum test data for integration test

* chore: .gitignore
2021-06-09 09:18:32 +09:00
Kota Kanbe
0095c40e69 fix(vet): go vet err of make build-scanner (#1248) 2021-06-09 08:00:52 +09:00
Kota Kanbe
82c1abfd3a fix(report): detection logic bugs for Oracle Linux (#1247)
* fix(report): continue detecting if arch is empty for Oracle Linux

* fix test case

* fix(report): a bug of `Not Fixed Yet` of Oracle linux scanning
2021-06-09 05:46:42 +09:00
sadayuki-matsuno
40988401bd feat(scanner) separate func analyze libraries (#1246)
* feat(scanner) separate func analyze libraries

* fix(scanner) fix typo
2021-06-04 07:42:29 +09:00
Kota Kanbe
e8e3f4d138 feat(lib): support of Go (go.sum) scan (#1244)
* chore: update trivy deps

* fix(test): fix sort order in json

* parse go.sum in scanning

* feat(lib): support go.sum
2021-06-03 11:31:37 +09:00
Norihiro NAKAOKA
7eb77f5b51 feat(scan): support external port scanner(nmap) in host machine (#1207)
* feat(scan): load portscan settings from config.toml

* feat(scan): support external port scanner:nmap

* style: rename variable

* feat(scan): logging apply options

* feat(scan): remove spoof ip address option

* feat(scan): more validate port scan config

* style: change comment

* fix: parse port number as uint16

* feat(discover): add portscan section

* feat(discover): change default scanTechniques

* feat(docker): add nmap and version update

* feat(scan): nmap module upgrade

* fix: wrap err using %w

* feat(scan): print cmd using external port scanner

* feat(scan): more details external port scan command

* feat(scan): add capability check in validation

* fix(scanner): format error

* chore: change format
2021-05-26 09:35:28 +09:00
Kota Kanbe
e115235299 fix(test): dev mode to false in package-lock.json (#1242)
* fix(test): dev mode to false in package-lock.json

* fix: vet warning
2021-05-17 08:04:16 +09:00
otuki
151d4b2d30 fix(scan): Avoid panic when SSH connection refused (#1236)
* fix(fix-ssh-fata): Avoid panic when SSH connection refused

* chore(fix-ssh-fata): fix typo
2021-05-12 18:30:26 +09:00
Kota Kanbe
e553f8b4c5 feat(trivy): go mod update trivy v0.17.2 (#1235)
* feat(trivy): go mod update trivy v0.17.2

* wg.Wait

* fix reporting

* fix test case

* add gemfile.lock of redmine to integration test

* fix(test): add Pipfile.lock

* add poetry.lock to integration test

* add composer.lock to integration test

* add integration test case
2021-05-12 18:27:55 +09:00
Kota Kanbe
47652ef0fb fix(report): include the num of criticals in total #1233 (#1234) 2021-05-07 07:57:33 +09:00
Kota Kanbe
ab0e950800 fix(oracle): extracting only advisory ID from OVAL.title (#1232) 2021-04-29 12:54:36 +09:00
otuki
a7b0ce1c85 refactor(git-conf): config template in github section changed (#1229) 2021-04-28 14:53:11 +09:00
otuki
dc9c0edece refactor(git-conf): Specifying ignoreGitHubDismissed per repository (#1224)
* refactor(git-conf): Specifying ignoreGitHubDismissed per repository with config.toml

* refactor(git-conf): change json tag into camelCase

* refactor(git-conf): change first char of json tag into lowercase
2021-04-28 13:41:38 +09:00
Kota Kanbe
17ae386d1e chore: add a test case #1227 (#1228) 2021-04-28 12:18:18 +09:00
Kota Kanbe
2d369d0cfe Fix false positive for Oracle Linux (#1227)
* fix(oracle): false-positive(handle arch of pkgs)

* fix(oracle): false positive kernel-related CVEs

* add a test case for ksplice1

* fix(scan): handle uek kernel for Oracle linux

* fix(scan): handle uek kernel for reboot required

* fix(oracle): false-positive for redis-backend
2021-04-27 20:38:45 +09:00
Kota Kanbe
c36e645d9b fix(report): false positive for kernel-related CVE for RedHat, CentOS, Oracle and Amazon #1199 (#1223) 2021-04-23 08:59:46 +09:00
Kota Kanbe
40039c07e2 fix(report): panic when closing db connection of gost (#1222) 2021-04-23 06:14:12 +09:00
Kota Kanbe
a692cec0ef fix(gost): close gost DB connection in server mode #1217 (#1221) 2021-04-21 11:59:11 +09:00
otuki
e7ca491a94 fix(report): Avoid http reports error (#1216) 2021-04-21 10:00:58 +09:00
Shigechika AIKAWA
23f3e2fc11 fix(config): add Ubuntu 20.10 (#1218) 2021-04-21 09:05:33 +09:00
Kota Kanbe
27b3e17b79 feat(saas): delete json dir automatically after upload (#1212)
* feat(saas): delete json dir automatically after upload

* fix lint err
2021-04-15 05:58:41 +09:00
Kota Kanbe
740781af56 feat(logging): add -log-to-file and don't output to file by default (#1209)
* feat(logging): add -log-to-file and don't output to file by default

* update go-cve-dict

* fix lint err
2021-04-05 17:41:07 +09:00
Kota Kanbe
36c9c229b8 fix(report): avoid nil pointer when report FreeBSD (#1208) 2021-04-05 12:54:27 +09:00
Norihiro NAKAOKA
183fdcbdef fix: support for missing files in the results or results directory (#1206)
* fix: support for missing files in the results or results directory

* fix: support for missing files in the results or results directory
2021-04-05 07:28:20 +09:00
Kota Kanbe
a2a697900a refactor: move const to constant pkg (#1205) 2021-04-02 15:33:02 +09:00
Kota Kanbe
6fef4db8a0 fix .goreleaser.yml (#1204)
* fix .goreleaser.yml

* chore: fix lint warnings
2021-04-01 17:43:54 +09:00
sadayuki-matsuno
e879ff1e9e feat(scanner) export pkg list scan method (#1203)
* feat(scanner) export pkg list scan method

* fix args

* fix func

* fix init debian
2021-04-01 17:38:20 +09:00
Kota Kanbe
9bfe0627ae refactor: don't use global Config in private func (#1197)
* refactor: cve_client.go

* refactor: don't use global Config in private func

* remove import alias for config

* refactor: dbclient

* refactor: resultDir

* refactor: resultsDir

* refactor

* refactor: gost

* refactor: db client

* refactor: cveDB

* refactor: cvedb

* refactor: exploitDB

* refactor: remove detector/dbclient.go

* refactor: writer

* refactor: syslog writer

* refactor: ips

* refactor: ensureResultDir

* refactor: proxy

* fix(db): call CloseDB

* add integration test

* feat(report): sort array in json

* sort func for json diff

* add build-int to makefile

* add int-rds-redis to makefile

* fix: test case, makefile

* fix makefile

* show cve count after diff

* make diff

* diff -c

* sort exploits in json for diff

* sort metasploit, exploit
2021-04-01 13:36:24 +09:00
Tomoya Amachi
0179f4299a fix(trivy-to-vuls): converts even if null vulnerabilities (#1201) 2021-03-22 19:32:08 +09:00
Kota Kanbe
56017e57a0 feat(trivy): update trivy (#1196) 2021-03-12 09:31:48 +09:00
Kota Kanbe
cda91e0906 refactor: loading owasp dependency check xml (#1195) 2021-03-11 08:51:44 +09:00
Kota Kanbe
5d47adb5c9 fix(report): prioritize env vars over config.toml (#1194) 2021-03-10 07:39:58 +09:00
Kota Kanbe
54e73c2f54 fix(wordpress): enable to detect vulns of WordPress Core (#1193) 2021-03-09 10:40:52 +09:00
segatomo
2d075079f1 fix(log): remove log output of opening and migrating db (#1191)
* fix(log): remove log output of opening and migrating db

* fix(log): remove log output of opening and migrating db
2021-03-05 16:16:10 +09:00
Kota Kanbe
2a8ee4b22b refactor(report): azure and aws writer (#1190) 2021-03-04 07:42:38 +09:00
Kota Kanbe
1ec31d7be9 fix(configtest): all servers in the config if no args #1184 (#1189) 2021-03-03 12:51:07 +09:00
Kota Kanbe
02286b0c59 fix(scan): scan all servers in the config if no args #1184 (#1188) 2021-03-03 12:30:30 +09:00
Kota Kanbe
1d0c5dea9f fix(ubuntu): Fix deferred packages not showing as affected (#1187)
* fix(ubuntu): Fix deferred packages not showing as affected

https://github.com/kotakanbe/goval-dictionary/pull/122

* chore: Go version up
2021-03-02 07:50:35 +09:00
Kota Kanbe
1c4a12c4b7 refactor(report): initialize DB connection (#1186) 2021-03-02 06:34:46 +09:00
Kota Kanbe
3f2ac45d71 Refactor logger (#1185)
* refactor: logger

* refactor: logging

* refactor: rename func

* refactor: logging

* refactor: logging format
2021-02-26 10:36:58 +09:00
Kota Kanbe
518f4dc039 refactor: VulnDict (#1183) 2021-02-25 10:13:51 +09:00
Kota Kanbe
2cdeef4ffe refactor(config): validateOnReport (#1182) 2021-02-25 07:41:49 +09:00
Kota Kanbe
03579126fd refactor(config): localize config used like a global variable (#1179)
* refactor(report): LocalFileWriter

* refactor -format-json

* refactor: -format-one-email

* refactor: -format-csv

* refactor: -gzip

* refactor: -format-full-text

* refactor: -format-one-line-text

* refactor: -format-list

* refactor: remove -to-* from config

* refactor: IgnoreGitHubDismissed

* refactor: GitHub

* refactor: IgnoreUnscored

* refactor: diff

* refactor: lang

* refactor: cacheDBPath

* refactor: Remove config references

* refactor: ScanResults

* refactor: constant pkg

* chore: comment

* refactor: scanner

* refactor: scanner

* refactor: serverapi.go

* refactor: serverapi

* refactor: change pkg structure

* refactor: serverapi.go

* chore: remove empty file

* fix(scan): remove -ssh-native-insecure option

* fix(scan): remove the deprecated option `keypassword`
2021-02-25 05:54:17 +09:00
Kota Kanbe
e3c27e1817 fix(saas): Don't overwrite config.toml if UUID already set (#1180)
* fix(saas): Don't overwrite config.toml if UUID already set

* add a test case
2021-02-19 06:42:22 +09:00
Richard Alloway
aeaf308679 Add test-case to verify proper version comparison in lessThan() (#1178)
* Add test-case to verify proper version comparison when either/both/neither of newVer and ovalmodels.Package contain "_<minor version>"

* Rename vera to newVer in Test_lessThan()

* Fix oval/util_test.go formatting (make fmt)

Co-authored-by: Richard Alloway (OpenLogic) <ralloway@perforce.com>
2021-02-14 05:30:07 +09:00
Kota Kanbe
f5e47bea40 chore: add a test-case to #1176 (#1177) 2021-02-12 13:46:29 +09:00
Richard Alloway
50cf13a7f2 Pass packInOVAL.Version through centOSVersionToRHEL() to remove the "_<point release>" portion so that packInOVAL.Version strings like 1.8.23-10.el7_9.1 become 1.8.23-10.el7.1 (same behavior as newVer, which now allows packInOVAL.Version and newVer to be directly compared). (#1176)
Co-authored-by: Richard Alloway (OpenLogic) <ralloway@perforce.com>
2021-02-12 13:33:36 +09:00
Kota Kanbe
abd8041772 fix(scan): yum ps warning for Red Hat family (#1174)
* fix(yumps): no debug message for known patterns

* refactor(scan): yum-ps

* refactor(scan): pkgPs
2021-02-12 13:03:06 +09:00
Kota Kanbe
847c6438e7 chore: fix debug message (#1169) 2021-02-11 06:31:51 +09:00
Kota Kanbe
ef8309df27 chore: remove the heck binary (#1173) 2021-02-11 06:31:32 +09:00
sadayuki-matsuno
0dff6cf983 fix(gost/microsoft) add workaround into mitigation (#1170)
* fix(gost/microsoft) add workaround into mitigation

* fix(gost/microsoft) fix typo and delete workaround field from vulninfo
2021-02-10 19:37:28 +09:00
kazuminn
4c04acbd9e feat(report) : Differences between vulnerability patched items (#1157)
* add plusDiff() and minusDiff()
* add plusDiff minusDiff test

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-02-10 06:55:48 +09:00
Kota Kanbe
1c4f231572 fix(scan): ignore rpm -qf exit status (#1168) 2021-02-09 17:26:12 +09:00
Kota Kanbe
51b8e169d2 fix(scan): warning if lsof command not found (#1167) 2021-02-07 07:28:45 +09:00
Kota Kanbe
b4611ae9b7 fix(scan): fix yum-ps warning Failed to exec which -bash (#1166) 2021-02-07 07:23:12 +09:00
Kota Kanbe
cd6722017b fix(scan): yum-ps err Failed to find the package (#1165) 2021-02-06 08:42:06 +09:00
Kota Kanbe
290edffccf fix(log): output version to log for debugging purpose (#1163) 2021-02-04 07:47:56 +09:00
Kota Kanbe
64a6222bf9 fix(report): set created_at and updated_at of trivy to json (#1162) 2021-02-03 17:52:44 +09:00
Kota Kanbe
adb686b7c9 fix(report): set created_at and updated_at of wpscan.com to json (#1161) 2021-02-03 16:41:44 +09:00
Kota Kanbe
d4af341b0f fix(report): remove duplicated refreshing logic when report with -diff (#1160) 2021-02-03 07:37:19 +09:00
Kota Kanbe
fea7e93c8d chore: fix comment (#1158) 2021-02-02 06:06:49 +09:00
sadayuki-matsuno
8b6b8d0f2e feat(wordpress): define API limit exceed error for wpscan.com (#1155)
* feat(wordpress) specify wp err

* fix typo, change const name

Co-authored-by: Kota Kanbe <kotakanbe@gmail.com>
2021-01-30 09:53:41 +09:00
Kota Kanbe
4dcbd865cc fix(report): set http timeout 10 sec (#1154)
* fix(report): set http timeout 10 sec

* fix: add an error handling
2021-01-30 09:40:33 +09:00
Kota Kanbe
39b19444fe Merge branch 'master' of github.com:future-architect/vuls 2021-01-28 16:24:14 +09:00
Kota Kanbe
644d5a5462 fix(report): remove retry logic for wpscan.com (#1151)
* fix(saas) change saas upload s3 key (#1116)

* fix(report): remove retry logic for wpscan.com

Co-authored-by: sadayuki-matsuno <sadayuki.matsuno@gmail.com>
2021-01-28 16:21:33 +09:00
216 changed files with 39724 additions and 17425 deletions


@@ -1,7 +1,6 @@
.dockerignore
Dockerfile
vendor/
cve.sqlite3*
oval.sqlite3*
*.sqlite3*
setup/
img/
img/

12
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "gomod" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
target-branch: "master"


@@ -35,11 +35,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -50,7 +50,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -64,4 +64,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2

69
.github/workflows/docker-publish.yml vendored Normal file

@@ -0,0 +1,69 @@
name: Publish Docker image
on:
push:
branches:
- 'master'
tags:
- '*'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: vuls/vuls image meta
id: oss-meta
uses: docker/metadata-action@v4
with:
images: vuls/vuls
tags: |
type=ref,event=tag
- name: vuls/fvuls image meta
id: fvuls-meta
uses: docker/metadata-action@v4
with:
images: vuls/fvuls
tags: |
type=ref,event=tag
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: OSS image build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
push: true
tags: |
vuls/vuls:latest
${{ steps.oss-meta.outputs.tags }}
secrets: |
"github_token=${{ secrets.GITHUB_TOKEN }}"
platforms: linux/amd64,linux/arm64
- name: FutureVuls image build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./contrib/Dockerfile
push: true
tags: |
vuls/fvuls:latest
${{ steps.fvuls-meta.outputs.tags }}
secrets: |
"github_token=${{ secrets.GITHUB_TOKEN }}"
platforms: linux/amd64,linux/arm64


@@ -11,12 +11,15 @@ jobs:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: golangci-lint
uses: golangci/golangci-lint-action@v2
- uses: actions/setup-go@v3
with:
# Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
version: v1.32
go-version: 1.18
- uses: actions/checkout@v3
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: v1.50.1
args: --timeout=10m
# Optional: working directory, useful for monorepos


@@ -11,21 +11,22 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
-
name: Unshallow
run: git fetch --prune --unshallow
-
name: Set up Go
uses: actions/setup-go@v2
uses: actions/setup-go@v3
with:
go-version: 1.15
go-version-file: go.mod
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v2
uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser
version: latest
args: release --rm-dist
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -9,13 +9,13 @@ jobs:
steps:
- name: Set up Go 1.x
uses: actions/setup-go@v2
uses: actions/setup-go@v3
with:
go-version: 1.15.x
go-version: 1.18.x
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Test
run: make test


@@ -1,22 +0,0 @@
name: go-mod-tidy-pr
on:
schedule:
- cron: "0 0 * * 1" # Weekly build
jobs:
go-mod-tidy-pr:
name: go-mod-tidy-pr
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run go-mod-tidy-pr
uses: sue445/go-mod-tidy-pr@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
git_user_name: kotakanbe
git_user_email: kotakanbe@gmail.com
go_version: 1.15.6

13
.gitignore vendored

@@ -1,6 +1,6 @@
.vscode
*.txt
*.json
*.swp
*.sqlite3*
*.db
tags
@@ -9,9 +9,16 @@ coverage.out
issues/
vendor/
log/
results/
*config.toml
results
config.toml
!setup/docker/*
.DS_Store
dist/
.idea
vuls.*
vuls
!cmd/vuls
future-vuls
trivy-to-vuls
snmp2cpe
!snmp2cpe/

3
.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "integration"]
path = integration
url = https://github.com/vulsio/integration


@@ -1,14 +1,51 @@
name: golang-ci
run:
timeout: 10m
go: '1.18'
linters-settings:
errcheck:
revive:
# see https://github.com/mgechev/revive#available-rules for details.
ignore-generated-header: true
severity: warning
confidence: 0.8
rules:
- name: blank-imports
- name: context-as-argument
- name: context-keys-type
- name: dot-imports
- name: error-return
- name: error-strings
- name: error-naming
- name: exported
- name: if-return
- name: increment-decrement
- name: var-naming
- name: var-declaration
- name: package-comments
- name: range
- name: receiver-naming
- name: time-naming
- name: unexported-return
- name: indent-error-flow
- name: errorf
- name: empty-block
- name: superfluous-else
- name: unused-parameter
- name: unreachable-code
- name: redefines-builtin-id
staticcheck:
# https://staticcheck.io/docs/options#checks
checks: ["all", "-SA1019"]
# errcheck:
#exclude: /path/to/file.txt
linters:
disable-all: true
enable:
- goimports
- golint
- revive
- govet
- misspell
- errcheck


@@ -1,16 +1,18 @@
project_name: vuls
env:
- GO111MODULE=on
release:
github:
owner: future-architect
name: vuls
builds:
- id: vuls
env:
- CGO_ENABLED=0
goos:
- linux
- windows
goarch:
- amd64
- arm64
main: ./cmd/vuls/main.go
flags:
- -a
@@ -23,6 +25,7 @@ builds:
- CGO_ENABLED=0
goos:
- linux
- windows
goarch:
- 386
- amd64
@@ -31,7 +34,8 @@ builds:
main: ./cmd/scanner/main.go
flags:
- -a
- -tags=scanner
tags:
- scanner
ldflags:
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
binary: vuls-scanner
@@ -41,12 +45,17 @@ builds:
- CGO_ENABLED=0
goos:
- linux
- windows
goarch:
- 386
- amd64
- arm
- arm64
tags:
- scanner
main: ./contrib/trivy/cmd/main.go
ldflags:
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
binary: trivy-to-vuls
- id: future-vuls
@@ -54,6 +63,7 @@ builds:
- CGO_ENABLED=0
goos:
- linux
- windows
goarch:
- 386
- amd64
@@ -61,10 +71,33 @@ builds:
- arm64
flags:
- -a
- -tags=scanner
tags:
- scanner
ldflags:
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
main: ./contrib/future-vuls/cmd/main.go
binary: future-vuls
- id: snmp2cpe
env:
- CGO_ENABLED=0
goos:
- linux
- windows
goarch:
- 386
- amd64
- arm
- arm64
flags:
- -a
tags:
- scanner
ldflags:
- -s -w -X github.com/future-architect/vuls/config.Version={{.Version}} -X github.com/future-architect/vuls/config.Revision={{.Commit}}-{{ .CommitDate }}
main: ./contrib/snmp2cpe/cmd/main.go
binary: snmp2cpe
archives:
- id: vuls
@@ -74,7 +107,6 @@ archives:
format: tar.gz
files:
- LICENSE
- NOTICE
- README*
- CHANGELOG.md
@@ -85,7 +117,6 @@ archives:
format: tar.gz
files:
- LICENSE
- NOTICE
- README*
- CHANGELOG.md
@@ -96,7 +127,6 @@ archives:
format: tar.gz
files:
- LICENSE
- NOTICE
- README*
- CHANGELOG.md
@@ -107,8 +137,18 @@ archives:
format: tar.gz
files:
- LICENSE
- NOTICE
- README*
- CHANGELOG.md
- id: snmp2cpe
name_template: '{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
builds:
- snmp2cpe
format: tar.gz
files:
- LICENSE
- README*
- CHANGELOG.md
snapshot:
name_template: SNAPSHOT-{{ .Commit }}

30
.revive.toml Normal file

@@ -0,0 +1,30 @@
ignoreGeneratedHeader = false
severity = "warning"
confidence = 0.8
errorCode = 0
warningCode = 0
[rule.blank-imports]
[rule.context-as-argument]
[rule.context-keys-type]
[rule.dot-imports]
[rule.error-return]
[rule.error-strings]
[rule.error-naming]
[rule.exported]
[rule.if-return]
[rule.increment-decrement]
[rule.var-naming]
[rule.var-declaration]
[rule.package-comments]
[rule.range]
[rule.receiver-naming]
[rule.time-naming]
[rule.unexported-return]
[rule.indent-error-flow]
[rule.errorf]
[rule.empty-block]
[rule.superfluous-else]
[rule.unused-parameter]
[rule.unreachable-code]
[rule.redefines-builtin-id]


@@ -10,10 +10,7 @@ ENV REPOSITORY github.com/future-architect/vuls
COPY . $GOPATH/src/$REPOSITORY
RUN cd $GOPATH/src/$REPOSITORY && make install
FROM alpine:3.11
MAINTAINER hikachan sadayuki-matsuno
FROM alpine:3.16
ENV LOGDIR /var/log/vuls
ENV WORKDIR /vuls
@@ -22,6 +19,7 @@ RUN apk add --no-cache \
openssh-client \
ca-certificates \
git \
nmap \
&& mkdir -p $WORKDIR $LOGDIR
COPY --from=builder /go/bin/vuls /usr/local/bin/


@@ -17,66 +17,224 @@ PKGS = $(shell go list ./...)
VERSION := $(shell git describe --tags --abbrev=0)
REVISION := $(shell git rev-parse --short HEAD)
BUILDTIME := $(shell date "+%Y%m%d_%H%M%S")
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' \
-X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
GO := GO111MODULE=on go
CGO_UNABLED := CGO_ENABLED=0 go
GO_OFF := GO111MODULE=off go
LDFLAGS := -X 'github.com/future-architect/vuls/config.Version=$(VERSION)' -X 'github.com/future-architect/vuls/config.Revision=build-$(BUILDTIME)_$(REVISION)'
GO := CGO_ENABLED=0 go
all: build test
all: build
build: ./cmd/vuls/main.go pretest fmt
build: ./cmd/vuls/main.go
$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
b: ./cmd/vuls/main.go
$(GO) build -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/vuls
install: ./cmd/vuls/main.go pretest fmt
install: ./cmd/vuls/main.go
$(GO) install -ldflags "$(LDFLAGS)" ./cmd/vuls
build-scanner: ./cmd/scanner/main.go pretest fmt
$(CGO_UNABLED) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner
build-scanner: ./cmd/scanner/main.go
$(GO) build -tags=scanner -a -ldflags "$(LDFLAGS)" -o vuls ./cmd/scanner
install-scanner: ./cmd/scanner/main.go pretest fmt
$(CGO_UNABLED) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner
install-scanner: ./cmd/scanner/main.go
$(GO) install -tags=scanner -ldflags "$(LDFLAGS)" ./cmd/scanner
lint:
$(GO_OFF) get -u golang.org/x/lint/golint
golint $(PKGS)
go install github.com/mgechev/revive@latest
revive -config ./.revive.toml -formatter plain $(PKGS)
vet:
echo $(PKGS) | xargs env $(GO) vet || exit;
golangci:
go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
golangci-lint run
fmt:
gofmt -s -w $(SRCS)
mlint:
$(foreach file,$(SRCS),gometalinter $(file) || exit;)
fmtcheck:
$(foreach file,$(SRCS),gofmt -s -d $(file);)
pretest: lint vet fmtcheck
test:
test: pretest
$(GO) test -cover -v ./... || exit;
unused:
$(foreach pkg,$(PKGS),unused $(pkg);)
cov:
@ go get -v github.com/axw/gocov/gocov
@ go get golang.org/x/tools/cmd/cover
gocov test | gocov report
gocov test -v ./... | gocov report
clean:
echo $(PKGS) | xargs go clean || exit;
# trivy-to-vuls
build-trivy-to-vuls: pretest fmt
$(GO) build -o trivy-to-vuls contrib/trivy/cmd/*.go
build-trivy-to-vuls: ./contrib/trivy/cmd/main.go
$(GO) build -a -ldflags "$(LDFLAGS)" -o trivy-to-vuls ./contrib/trivy/cmd
# future-vuls
build-future-vuls: pretest fmt
$(GO) build -o future-vuls contrib/future-vuls/cmd/*.go
build-future-vuls: ./contrib/future-vuls/cmd/main.go
$(GO) build -a -ldflags "$(LDFLAGS)" -o future-vuls ./contrib/future-vuls/cmd
# snmp2cpe
build-snmp2cpe: ./contrib/snmp2cpe/cmd/main.go
$(GO) build -a -ldflags "$(LDFLAGS)" -o snmp2cpe ./contrib/snmp2cpe/cmd
# integration-test
BASE_DIR := '${PWD}/integration/results'
CURRENT := `find ${BASE_DIR} -type d -exec basename {} \; | sort -nr | head -n 1`
NOW=$(shell date '+%Y-%m-%dT%H-%M-%S%z')
NOW_JSON_DIR := '${BASE_DIR}/$(NOW)'
ONE_SEC_AFTER=$(shell date -d '+1 second' '+%Y-%m-%dT%H-%M-%S%z')
ONE_SEC_AFTER_JSON_DIR := '${BASE_DIR}/$(ONE_SEC_AFTER)'
LIBS := 'bundler' 'pip' 'pipenv' 'poetry' 'composer' 'npm' 'yarn' 'pnpm' 'cargo' 'gomod' 'gosum' 'gobinary' 'jar' 'pom' 'gradle' 'nuget-lock' 'nuget-config' 'dotnet-deps' 'conan' 'nvd_exact' 'nvd_rough' 'nvd_vendor_product' 'nvd_match_no_jvn' 'jvn_vendor_product' 'jvn_vendor_product_nover'
diff:
# git clone git@github.com:vulsio/vulsctl.git
# cd vulsctl/docker
# ./update-all.sh
# cd /path/to/vuls
# vim integration/int-config.toml
# ln -s vuls vuls.new
# ln -s oldvuls vuls.old
# make int
# (ex. test 10 times: for i in `seq 10`; do make int ARGS=-quiet ; done)
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
mv ${BASE_DIR} /tmp/${NOW}
endif
mkdir -p ${NOW_JSON_DIR}
sleep 1
./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp ${BASE_DIR}/$(CURRENT)/*.json ${NOW_JSON_DIR}
- cp integration/data/results/*.json ${NOW_JSON_DIR}
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
sleep 1
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp ${BASE_DIR}/$(CURRENT)/*.json ${ONE_SEC_AFTER_JSON_DIR}
- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${ONE_SEC_AFTER}
$(call sed-d)
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
$(call count-cve)
diff-redis:
# docker network create redis-nw
# docker run --name redis -d --network redis-nw -p 127.0.0.1:6379:6379 redis
# git clone git@github.com:vulsio/vulsctl.git
# cd vulsctl/docker
# ./update-all-redis.sh
# (or export DOCKER_NETWORK=redis-nw; cd /home/ubuntu/vulsctl/docker; ./update-all.sh --dbtype redis --dbpath "redis://redis/0")
# vim integration/int-redis-config.toml
# ln -s vuls vuls.new
# ln -s oldvuls vuls.old
# make int-redis
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
mv ${BASE_DIR} /tmp/${NOW}
endif
mkdir -p ${NOW_JSON_DIR}
sleep 1
./vuls.old scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp -f ${BASE_DIR}/$(CURRENT)/*.json ${NOW_JSON_DIR}
- cp integration/data/results/*.json ${NOW_JSON_DIR}
./vuls.old report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${NOW}
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
sleep 1
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp -f ${BASE_DIR}/$(CURRENT)/*.json ${ONE_SEC_AFTER_JSON_DIR}
- cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}
$(call sed-d)
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
$(call count-cve)
diff-rdb-redis:
ifneq ($(shell ls -U1 ${BASE_DIR} | wc -l), 0)
mv ${BASE_DIR} /tmp/${NOW}
endif
mkdir -p ${NOW_JSON_DIR}
sleep 1
# new vs new
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp -f ${BASE_DIR}/$(CURRENT)/*.json ${NOW_JSON_DIR}
cp integration/data/results/*.json ${NOW_JSON_DIR}
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-config.toml ${NOW}
mkdir -p ${ONE_SEC_AFTER_JSON_DIR}
sleep 1
./vuls.new scan -config=./integration/int-config.toml --results-dir=${BASE_DIR} ${LIBS}
cp -f ${BASE_DIR}/$(CURRENT)/*.json ${ONE_SEC_AFTER_JSON_DIR}
cp integration/data/results/*.json ${ONE_SEC_AFTER_JSON_DIR}
./vuls.new report --format-json --refresh-cve --results-dir=${BASE_DIR} -config=./integration/int-redis-config.toml ${ONE_SEC_AFTER}
$(call sed-d)
- diff -c ${NOW_JSON_DIR} ${ONE_SEC_AFTER_JSON_DIR}
echo "old: ${NOW_JSON_DIR} , new: ${ONE_SEC_AFTER_JSON_DIR}"
$(call count-cve)
head= $(shell git rev-parse HEAD)
prev= $(shell git rev-parse HEAD^)
branch=$(shell git rev-parse --abbrev-ref HEAD)
build-integration:
git stash
# buld HEAD
git checkout ${head}
make build
mv -f ./vuls ./vuls.${head}
# HEAD^
git checkout ${prev}
make build
mv -f ./vuls ./vuls.${prev}
# master
git checkout master
make build
mv -f ./vuls ./vuls.master
# working tree
git checkout ${branch}
git stash apply stash@\{0\}
make build
# update integration data
git submodule update --remote
# for integration testing, vuls.new and vuls.old needed.
# ex)
# $ ln -s ./vuls ./vuls.new
# $ ln -s ./vuls.${head} ./vuls.old
# or
# $ ln -s ./vuls.${prev} ./vuls.old
# then
# $ make diff
# $ make diff-redis
# $ make diff-rdb-redis
define sed-d
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedAt/d' {} \;
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedAt/d' {} \;
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"Type":/d' {} \;
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/"SQLite3Path":/d' {} \;
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/reportedRevision/d' {} \;
find ${NOW_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
find ${ONE_SEC_AFTER_JSON_DIR} -type f -exec sed -i -e '/scannedRevision/d' {} \;
endef
define count-cve
for jsonfile in ${NOW_JSON_DIR}/*.json ; do \
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
done
for jsonfile in ${ONE_SEC_AFTER_JSON_DIR}/*.json ; do \
echo $$jsonfile; cat $$jsonfile | jq ".scannedCves | length" ; \
done
endef

153
LICENSE

@@ -1,21 +1,23 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
@@ -24,34 +26,44 @@ them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
@@ -60,7 +72,7 @@ modification follow.
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
@@ -537,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
@@ -629,33 +631,44 @@ to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
Vuls - Vulnerability Scanner
Copyright (C) 2016 Future Corporation , Japan.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
Vuls Copyright (C) 2016 Future Corporation , Japan.
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

2
NOTICE

@@ -1,2 +0,0 @@
Vuls Copyright (C) 2016 Future Corporation , Japan.


@@ -3,14 +3,13 @@
[![Slack](https://img.shields.io/badge/slack-join-blue.svg)](http://goo.gl/forms/xm5KFo35tu)
[![License](https://img.shields.io/github/license/future-architect/vuls.svg?style=flat-square)](https://github.com/future-architect/vuls/blob/master/LICENSE)
[![Build Status](https://travis-ci.org/future-architect/vuls.svg?branch=master)](https://travis-ci.org/future-architect/vuls)
[![Go Report Card](https://goreportcard.com/badge/github.com/future-architect/vuls)](https://goreportcard.com/report/github.com/future-architect/vuls)
[![Contributors](https://img.shields.io/github/contributors/future-architect/vuls.svg)](https://github.com/future-architect/vuls/graphs/contributors)
![Vuls-logo](img/vuls_logo.png)
Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
We have a slack team. [Join slack team](http://goo.gl/forms/xm5KFo35tu)
Vulnerability scanner for Linux/FreeBSD, agent-less, written in Go.
We have a slack team. [Join slack team](https://join.slack.com/t/vuls-github/shared_invite/zt-1fculjwj4-6nex2JNE7DpOSiKZ1ztDFw)
Twitter: [@vuls_en](https://twitter.com/vuls_en)
![Vuls-Abstract](img/vuls-abstract.png)
@@ -48,10 +47,11 @@ Vuls is a tool created to solve the problems listed above. It has the following
### Scan for any vulnerabilities in Linux/FreeBSD Server
[Supports major Linux/FreeBSD](https://vuls.io/docs/en/supported-os.html)
[Supports major Linux/FreeBSD/Windows](https://vuls.io/docs/en/supported-os.html)
- Alpine, Amazon Linux, CentOS, Debian, Oracle Linux, Raspbian, RHEL, SUSE Enterprise Linux, and Ubuntu
- Alpine, Amazon Linux, CentOS, AlmaLinux, Rocky Linux, Debian, Oracle Linux, Raspbian, RHEL, openSUSE, openSUSE Leap, SUSE Enterprise Linux, Fedora, and Ubuntu
- FreeBSD
- Windows
- Cloud, on-premise, Running Docker Container
### High-quality scan
@@ -71,6 +71,8 @@ Vuls is a tool created to solve the problems listed above. It has the following
- [Alpine-secdb](https://git.alpinelinux.org/cgit/alpine-secdb/)
- [Red Hat Security Advisories](https://access.redhat.com/security/security-updates/)
- [Debian Security Bug Tracker](https://security-tracker.debian.org/tracker/)
- [Ubuntu CVE Tracker](https://people.canonical.com/~ubuntu-security/cve/)
- [Microsoft CVRF](https://api.msrc.microsoft.com/cvrf/v2.0/swagger/index)
- Commands(yum, zypper, pkg-audit)
- RHSA / ALAS / ELSA / FreeBSD-SA
@@ -79,17 +81,22 @@ Vuls is a tool created to solve the problems listed above. It has the following
- PoC, Exploit
- [Exploit Database](https://www.exploit-db.com/)
- [Metasploit-Framework modules](https://www.rapid7.com/db/?q=&type=metasploit)
- [qazbnm456/awesome-cve-poc](https://github.com/qazbnm456/awesome-cve-poc)
- [nomi-sec/PoC-in-GitHub](https://github.com/nomi-sec/PoC-in-GitHub)
- [gmatuz/inthewilddb](https://github.com/gmatuz/inthewilddb)
- CERT
- [US-CERT](https://www.us-cert.gov/ncas/alerts)
- [JPCERT](http://www.jpcert.or.jp/at/2019.html)
- CISA(Cybersecurity & Infrastructure Security Agency)
- [Known Exploited Vulnerabilities Catalog](https://www.cisa.gov/known-exploited-vulnerabilities-catalog)
- Cyber Threat Intelligence(MITRE ATT&CK and CAPEC)
- [mitre/cti](https://github.com/mitre/cti)
- Libraries
- [Node.js Security Working Group](https://github.com/nodejs/security-wg)
- [Ruby Advisory Database](https://github.com/rubysec/ruby-advisory-db)
- [Safety DB(Python)](https://github.com/pyupio/safety-db)
- [PHP Security Advisories Database](https://github.com/FriendsOfPHP/security-advisories)
- [RustSec Advisory Database](https://github.com/RustSec/advisory-db)
- [aquasecurity/vuln-list](https://github.com/aquasecurity/vuln-list)
- WordPress
- [wpscan](https://wpscan.com/api)
@@ -100,15 +107,15 @@ Vuls is a tool created to solve the problems listed above. It has the following
- Scan without root privilege, no dependencies
- Almost no load on the scan target server
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)
[Fast Root Scan](https://vuls.io/docs/en/architecture-fast-root-scan.html)
- Scan with root privilege
- Almost no load on the scan target server
- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Oracle Linux, and RedHat)
- Detect processes affected by update using yum-ps (Amazon Linux, CentOS, Alma Linux, Rocky Linux, Oracle Linux, Fedora, and RedHat)
- Detect processes which updated before but not restarting yet using checkrestart of debian-goodies (Debian and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Debian, Oracle Linux, Red Hat, and Ubuntu)
- Offline mode scan with no internet access. (CentOS, Alma Linux, Rocky Linux, Debian, Oracle Linux, Red Hat, Fedora, and Ubuntu)
### [Remote, Local scan mode, Server mode](https://vuls.io/docs/en/architecture-remote-local.html)
@@ -183,11 +190,14 @@ see [vulsdoc](https://vuls.io/docs/en/how-to-contribute.html)
----
## Stargazers over time
## Sponsors
[![Stargazers over time](https://starcharts.herokuapp.com/future-architect/vuls.svg)](https://starcharts.herokuapp.com/future-architect/vuls)
| | |
| ------------- | ------------- |
| <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=vuls"><img src="img/sponsor/tines.png" align="left" width="600px" ></a> | Tines is no-code automation for security teams. Build powerful, reliable workflows without a development team. |
| <a href="https://www.sakura.ad.jp/"><img src="https://vuls.io/img/icons/sakura.svg" align="left" width="600px" ></a> | SAKURA internet Inc. is an Internet company founded in 1996. We provide cloud computing services such as "Sakura's Shared Server", "Sakura's VPS", and "Sakura's Cloud" to meet the needs of a wide range of customers, from individuals and corporations to the education and public sectors, using its own data centers in Japan. Based on the philosophy of "changing what you want to do into what you can do," we offer DX solutions for all fields. |
-----;
----
## License

9
SECURITY.md Normal file

@@ -0,0 +1,9 @@
# Security Policy
## Supported Versions
Only the latest version is supported.
## Reporting a Vulnerability
Email kotakanbe@gmail.com

17
cache/bolt.go vendored

@@ -4,22 +4,23 @@ import (
"encoding/json"
"time"
"github.com/boltdb/bolt"
"github.com/future-architect/vuls/util"
"github.com/sirupsen/logrus"
bolt "go.etcd.io/bbolt"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/util"
)
// Bolt holds a pointer of bolt.DB
// boltdb is used to store a cache of Changelogs of Ubuntu/Debian
type Bolt struct {
Path string
Log *logrus.Entry
Log logging.Logger
db *bolt.DB
}
// SetupBolt opens a boltdb and creates a meta bucket if not exists.
func SetupBolt(path string, l *logrus.Entry) error {
func SetupBolt(path string, l logging.Logger) error {
l.Infof("Open boltDB: %s", path)
db, err := bolt.Open(path, 0600, nil)
if err != nil {
@@ -47,7 +48,7 @@ func (b Bolt) Close() error {
return b.db.Close()
}
// CreateBucketIfNotExists creates a buket that is specified by arg.
// CreateBucketIfNotExists creates a bucket that is specified by arg.
func (b *Bolt) createBucketIfNotExists(name string) error {
return b.db.Update(func(tx *bolt.Tx) error {
_, err := tx.CreateBucketIfNotExists([]byte(name))
@@ -93,7 +94,7 @@ func (b Bolt) RefreshMeta(meta Meta) error {
})
}
// EnsureBuckets puts a Meta information and create a buket that holds changelogs.
// EnsureBuckets puts a Meta information and create a bucket that holds changelogs.
func (b Bolt) EnsureBuckets(meta Meta) error {
jsonBytes, err := json.Marshal(meta)
if err != nil {
@@ -159,7 +160,7 @@ func (b Bolt) GetChangelog(servername, packName string) (changelog string, err e
return
}
// PutChangelog put the changelgo of specified packName into the Bucket
// PutChangelog put the changelog of specified packName into the Bucket
func (b Bolt) PutChangelog(servername, packName, changelog string) error {
return b.db.Update(func(tx *bolt.Tx) error {
bkt := tx.Bucket([]byte(servername))
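
SetupBolt's signature changes here from taking a *logrus.Entry to the project's own logging.Logger, and the bolt driver moves to go.etcd.io/bbolt. A minimal sketch of calling the new API, using only identifiers visible in this diff (logging.NewNormalLogger, cache.SetupBolt); the cache path is an arbitrary example:

package main

import (
	"github.com/future-architect/vuls/cache"
	"github.com/future-architect/vuls/logging"
)

func main() {
	// SetupBolt now takes a logging.Logger instead of *logrus.Entry.
	log := logging.NewNormalLogger()
	// Opens the boltdb file and creates the meta bucket if it does not exist.
	if err := cache.SetupBolt("/tmp/vuls-changelog-cache.db", log); err != nil {
		panic(err)
	}
}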

11
cache/bolt_test.go vendored

@@ -5,10 +5,11 @@ import (
"reflect"
"testing"
"github.com/boltdb/bolt"
bolt "go.etcd.io/bbolt"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/sirupsen/logrus"
)
const path = "/tmp/vuls-test-cache-11111111.db"
@@ -29,7 +30,7 @@ var meta = Meta{
}
func TestSetupBolt(t *testing.T) {
log := logrus.NewEntry(&logrus.Logger{})
log := logging.NewNormalLogger()
err := SetupBolt(path, log)
if err != nil {
t.Errorf("Failed to setup bolt: %s", err)
@@ -57,7 +58,7 @@ func TestSetupBolt(t *testing.T) {
}
func TestEnsureBuckets(t *testing.T) {
log := logrus.NewEntry(&logrus.Logger{})
log := logging.NewNormalLogger()
if err := SetupBolt(path, log); err != nil {
t.Errorf("Failed to setup bolt: %s", err)
}
@@ -98,7 +99,7 @@ func TestEnsureBuckets(t *testing.T) {
func TestPutGetChangelog(t *testing.T) {
clog := "changelog-text"
log := logrus.NewEntry(&logrus.Logger{})
log := logging.NewNormalLogger()
if err := SetupBolt(path, log); err != nil {
t.Errorf("Failed to setup bolt: %s", err)
}


@@ -29,7 +29,7 @@ func main() {
flag.Parse()
if *v {
fmt.Printf("vuls %s %s\n", config.Version, config.Revision)
fmt.Printf("vuls-%s-%s\n", config.Version, config.Revision)
os.Exit(int(subcommands.ExitSuccess))
}

30
config/awsconf.go Normal file

@@ -0,0 +1,30 @@
package config
// AWSConf is aws config
type AWSConf struct {
// AWS profile to use
Profile string `json:"profile"`
// AWS region to use
Region string `json:"region"`
// S3 bucket name
S3Bucket string `json:"s3Bucket"`
// /bucket/path/to/results
S3ResultsDir string `json:"s3ResultsDir"`
// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
S3ServerSideEncryption string `json:"s3ServerSideEncryption"`
Enabled bool `toml:"-" json:"-"`
}
// Validate configuration
func (c *AWSConf) Validate() (errs []error) {
// TODO
if !c.Enabled {
return
}
return
}
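
AWSConf above only carries the S3 reporting settings, and its Validate is still a TODO stub that returns no errors. A minimal sketch of populating it from Go, using only the fields shown in the struct; every value is a hypothetical placeholder:

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	// Field names come from the AWSConf struct above; the values are placeholders.
	c := config.AWSConf{
		Profile:                "default",
		Region:                 "ap-northeast-1",
		S3Bucket:               "example-vuls-reports",
		S3ResultsDir:           "results",
		S3ServerSideEncryption: "AES256",
		Enabled:                true,
	}
	if errs := c.Validate(); len(errs) > 0 {
		fmt.Println(errs)
		return
	}
	fmt.Printf("reports would be written under s3://%s/%s\n", c.S3Bucket, c.S3ResultsDir)
}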

46
config/azureconf.go Normal file

@@ -0,0 +1,46 @@
package config
import (
"os"
"golang.org/x/xerrors"
)
// AzureConf is azure config
type AzureConf struct {
// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
AccountName string `json:"accountName"`
// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
AccountKey string `json:"-"`
// Azure storage container name
ContainerName string `json:"containerName"`
Enabled bool `toml:"-" json:"-"`
}
const (
azureAccount = "AZURE_STORAGE_ACCOUNT"
azureKey = "AZURE_STORAGE_ACCESS_KEY"
)
// Validate configuration
func (c *AzureConf) Validate() (errs []error) {
if !c.Enabled {
return
}
// overwrite if env var is not empty
if os.Getenv(azureAccount) != "" {
c.AccountName = os.Getenv(azureAccount)
}
if os.Getenv(azureKey) != "" {
c.AccountKey = os.Getenv(azureKey)
}
if c.ContainerName == "" {
errs = append(errs, xerrors.Errorf("Azure storage container name is required"))
}
return
}
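
AzureConf.Validate above lets the AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_ACCESS_KEY environment variables override whatever is already set on the struct, and rejects an empty container name. A minimal sketch of that behaviour; the account, key, and container values are hypothetical:

package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	// Environment variables win over values already present on the struct.
	os.Setenv("AZURE_STORAGE_ACCOUNT", "examplestorage")
	os.Setenv("AZURE_STORAGE_ACCESS_KEY", "example-access-key")

	c := config.AzureConf{ContainerName: "vuls-results", Enabled: true}
	if errs := c.Validate(); len(errs) > 0 {
		fmt.Println(errs)
		return
	}
	fmt.Println(c.AccountName) // "examplestorage", taken from the environment variable
}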


@@ -9,11 +9,12 @@ import (
type ChatWorkConf struct {
APIToken string `json:"-"`
Room string `json:"-"`
Enabled bool `toml:"-" json:"-"`
}
// Validate validates configuration
func (c *ChatWorkConf) Validate() (errs []error) {
if !Conf.ToChatWork {
if !c.Enabled {
return
}
if len(c.Room) == 0 {


@@ -1,15 +1,18 @@
//go:build !windows
package config
import (
"fmt"
"os"
"runtime"
"strconv"
"strings"
"github.com/asaskevich/govalidator"
log "github.com/sirupsen/logrus"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/logging"
)
// Version of Vuls
@@ -21,99 +24,84 @@ var Revision string
// Conf has Configuration
var Conf Config
//Config is struct of Configuration
// Config is struct of Configuration
type Config struct {
Debug bool `json:"debug,omitempty"`
DebugSQL bool `json:"debugSQL,omitempty"`
Lang string `json:"lang,omitempty"`
logging.LogOpts
// scan, report
HTTPProxy string `valid:"url" json:"httpProxy,omitempty"`
LogDir string `json:"logDir,omitempty"`
ResultsDir string `json:"resultsDir,omitempty"`
Pipe bool `json:"pipe,omitempty"`
Quiet bool `json:"quiet,omitempty"`
NoProgress bool `json:"noProgress,omitempty"`
SSHNative bool `json:"sshNative,omitempty"`
Vvv bool `json:"vvv,omitempty"`
Default ServerInfo `json:"default,omitempty"`
Servers map[string]ServerInfo `json:"servers,omitempty"`
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
Default ServerInfo `json:"default,omitempty"`
Servers map[string]ServerInfo `json:"servers,omitempty"`
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
IgnoreGitHubDismissed bool `json:"ignore_git_hub_dismissed,omitempty"`
CacheDBPath string `json:"cacheDBPath,omitempty"`
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
ScanOpts
// report
CveDict GoCveDictConf `json:"cveDict,omitempty"`
OvalDict GovalDictConf `json:"ovalDict,omitempty"`
Gost GostConf `json:"gost,omitempty"`
Exploit ExploitConf `json:"exploit,omitempty"`
Metasploit MetasploitConf `json:"metasploit,omitempty"`
KEVuln KEVulnConf `json:"kevuln,omitempty"`
Cti CtiConf `json:"cti,omitempty"`
Slack SlackConf `json:"-"`
EMail SMTPConf `json:"-"`
HTTP HTTPConf `json:"-"`
Syslog SyslogConf `json:"-"`
AWS AWSConf `json:"-"`
Azure AzureConf `json:"-"`
ChatWork ChatWorkConf `json:"-"`
Telegram TelegramConf `json:"-"`
Slack SlackConf `json:"-"`
EMail SMTPConf `json:"-"`
HTTP HTTPConf `json:"-"`
Syslog SyslogConf `json:"-"`
AWS AWSConf `json:"-"`
Azure AzureConf `json:"-"`
ChatWork ChatWorkConf `json:"-"`
GoogleChat GoogleChatConf `json:"-"`
Telegram TelegramConf `json:"-"`
WpScan WpScanConf `json:"-"`
Saas SaasConf `json:"-"`
WpScan WpScanConf `json:"WpScan,omitempty"`
ReportOpts
}
Saas SaasConf `json:"-"`
DetectIPS bool `json:"detectIps,omitempty"`
// ReportConf is an interface to Validate Report Config
type ReportConf interface {
Validate() []error
}
RefreshCve bool `json:"refreshCve,omitempty"`
ToSlack bool `json:"toSlack,omitempty"`
ToChatWork bool `json:"toChatWork,omitempty"`
ToTelegram bool `json:"ToTelegram,omitempty"`
ToEmail bool `json:"toEmail,omitempty"`
ToSyslog bool `json:"toSyslog,omitempty"`
ToLocalFile bool `json:"toLocalFile,omitempty"`
ToS3 bool `json:"toS3,omitempty"`
ToAzureBlob bool `json:"toAzureBlob,omitempty"`
ToHTTP bool `json:"toHTTP,omitempty"`
FormatJSON bool `json:"formatJSON,omitempty"`
FormatOneEMail bool `json:"formatOneEMail,omitempty"`
FormatOneLineText bool `json:"formatOneLineText,omitempty"`
FormatList bool `json:"formatList,omitempty"`
FormatFullText bool `json:"formatFullText,omitempty"`
FormatCsvList bool `json:"formatCsvList,omitempty"`
GZIP bool `json:"gzip,omitempty"`
Diff bool `json:"diff,omitempty"`
// ScanOpts is options for scan
type ScanOpts struct {
Vvv bool `json:"vvv,omitempty"`
}
// ReportOpts is options for report
type ReportOpts struct {
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
ConfidenceScoreOver int `json:"confidenceScoreOver,omitempty"`
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
NoProgress bool `json:"noProgress,omitempty"`
RefreshCve bool `json:"refreshCve,omitempty"`
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
DiffPlus bool `json:"diffPlus,omitempty"`
DiffMinus bool `json:"diffMinus,omitempty"`
Diff bool `json:"diff,omitempty"`
Lang string `json:"lang,omitempty"`
}
// ValidateOnConfigtest validates
func (c Config) ValidateOnConfigtest() bool {
errs := c.checkSSHKeyExist()
if runtime.GOOS == "windows" && !c.SSHNative {
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
}
_, err := govalidator.ValidateStruct(c)
if err != nil {
if _, err := govalidator.ValidateStruct(c); err != nil {
errs = append(errs, err)
}
for _, err := range errs {
log.Error(err)
logging.Log.Error(err)
}
return len(errs) == 0
}
// ValidateOnScan validates configuration
func (c Config) ValidateOnScan() bool {
errs := c.checkSSHKeyExist()
if runtime.GOOS == "windows" && !c.SSHNative {
errs = append(errs, xerrors.New("-ssh-native-insecure is needed on windows"))
}
if len(c.ResultsDir) != 0 {
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
errs = append(errs, xerrors.Errorf(
@@ -121,29 +109,31 @@ func (c Config) ValidateOnScan() bool {
}
}
if len(c.CacheDBPath) != 0 {
if ok, _ := govalidator.IsFilePath(c.CacheDBPath); !ok {
errs = append(errs, xerrors.Errorf(
"Cache DB path must be a *Absolute* file path. -cache-dbpath: %s",
c.CacheDBPath))
}
}
_, err := govalidator.ValidateStruct(c)
if err != nil {
if _, err := govalidator.ValidateStruct(c); err != nil {
errs = append(errs, err)
}
for _, err := range errs {
log.Error(err)
for _, server := range c.Servers {
if !server.Module.IsScanPort() {
continue
}
if es := server.PortScan.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
if es := server.Windows.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
}
for _, err := range errs {
logging.Log.Error(err)
}
return len(errs) == 0
}
func (c Config) checkSSHKeyExist() (errs []error) {
for serverName, v := range c.Servers {
if v.Type == ServerTypePseudo {
if v.Type == constant.ServerTypePseudo {
continue
}
if v.KeyPath != "" {
@@ -156,39 +146,8 @@ func (c Config) checkSSHKeyExist() (errs []error) {
return errs
}
// ValidateOnReportDB validates configuration
func (c Config) ValidateOnReportDB() bool {
errs := []error{}
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
errs = append(errs, err)
}
if err := validateDB("ovaldb", c.OvalDict.Type, c.OvalDict.SQLite3Path, c.OvalDict.URL); err != nil {
errs = append(errs, err)
}
if err := validateDB("gostdb", c.Gost.Type, c.Gost.SQLite3Path, c.Gost.URL); err != nil {
errs = append(errs, err)
}
if err := validateDB("exploitdb", c.Exploit.Type, c.Exploit.SQLite3Path, c.Exploit.URL); err != nil {
errs = append(errs, err)
}
if err := validateDB("msfdb", c.Metasploit.Type, c.Metasploit.SQLite3Path, c.Metasploit.URL); err != nil {
errs = append(errs, err)
}
for _, err := range errs {
log.Error(err)
}
return len(errs) == 0
}
// ValidateOnReport validates configuration
func (c Config) ValidateOnReport() bool {
func (c *Config) ValidateOnReport() bool {
errs := []error{}
if len(c.ResultsDir) != 0 {
@@ -203,54 +162,41 @@ func (c Config) ValidateOnReport() bool {
errs = append(errs, err)
}
if mailerrs := c.EMail.Validate(); 0 < len(mailerrs) {
errs = append(errs, mailerrs...)
}
if slackerrs := c.Slack.Validate(); 0 < len(slackerrs) {
errs = append(errs, slackerrs...)
}
if chatworkerrs := c.ChatWork.Validate(); 0 < len(chatworkerrs) {
errs = append(errs, chatworkerrs...)
}
if telegramerrs := c.Telegram.Validate(); 0 < len(telegramerrs) {
errs = append(errs, telegramerrs...)
}
if syslogerrs := c.Syslog.Validate(); 0 < len(syslogerrs) {
errs = append(errs, syslogerrs...)
}
if httperrs := c.HTTP.Validate(); 0 < len(httperrs) {
errs = append(errs, httperrs...)
}
for _, err := range errs {
log.Error(err)
}
return len(errs) == 0
}
// ValidateOnTui validates configuration
func (c Config) ValidateOnTui() bool {
errs := []error{}
if len(c.ResultsDir) != 0 {
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
errs = append(errs, xerrors.Errorf(
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
for _, rc := range []ReportConf{
&c.EMail,
&c.Slack,
&c.ChatWork,
&c.GoogleChat,
&c.Telegram,
&c.Syslog,
&c.HTTP,
&c.AWS,
&c.Azure,
} {
if es := rc.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
}
if err := validateDB("cvedb", c.CveDict.Type, c.CveDict.SQLite3Path, c.CveDict.URL); err != nil {
errs = append(errs, err)
for _, cnf := range []VulnDictInterface{
&Conf.CveDict,
&Conf.OvalDict,
&Conf.Gost,
&Conf.Exploit,
&Conf.Metasploit,
&Conf.KEVuln,
&Conf.Cti,
} {
if err := cnf.Validate(); err != nil {
errs = append(errs, xerrors.Errorf("Failed to validate %s: %+v", cnf.GetName(), err))
}
if err := cnf.CheckHTTPHealth(); err != nil {
errs = append(errs, xerrors.Errorf("Run %s as server mode before reporting: %+v", cnf.GetName(), err))
}
}
for _, err := range errs {
log.Error(err)
logging.Log.Error(err)
}
return len(errs) == 0
@@ -260,83 +206,11 @@ func (c Config) ValidateOnTui() bool {
func (c Config) ValidateOnSaaS() bool {
saaserrs := c.Saas.Validate()
for _, err := range saaserrs {
log.Error("Failed to validate SaaS conf: %+w", err)
logging.Log.Error("Failed to validate SaaS conf: %+w", err)
}
return len(saaserrs) == 0
}
// validateDB validates configuration
func validateDB(dictionaryDBName, dbType, dbPath, dbURL string) error {
log.Infof("-%s-type: %s, -%s-url: %s, -%s-path: %s",
dictionaryDBName, dbType, dictionaryDBName, dbURL, dictionaryDBName, dbPath)
switch dbType {
case "sqlite3":
if dbURL != "" {
return xerrors.Errorf("To use SQLite3, specify -%s-type=sqlite3 and -%s-path. To use as http server mode, specify -%s-type=http and -%s-url",
dictionaryDBName, dictionaryDBName, dictionaryDBName, dictionaryDBName)
}
if ok, _ := govalidator.IsFilePath(dbPath); !ok {
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. -%s-path: %s",
dictionaryDBName, dbPath)
}
case "mysql":
if dbURL == "" {
return xerrors.Errorf(`MySQL connection string is needed. -%s-url="user:pass@tcp(localhost:3306)/dbname"`,
dictionaryDBName)
}
case "postgres":
if dbURL == "" {
return xerrors.Errorf(`PostgreSQL connection string is needed. -%s-url="host=myhost user=user dbname=dbname sslmode=disable password=password"`,
dictionaryDBName)
}
case "redis":
if dbURL == "" {
return xerrors.Errorf(`Redis connection string is needed. -%s-url="redis://localhost/0"`,
dictionaryDBName)
}
case "http":
if dbURL == "" {
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`,
dictionaryDBName)
}
default:
return xerrors.Errorf("%s type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. -%s-type: %s",
dictionaryDBName, dictionaryDBName, dbType)
}
return nil
}
// AWSConf is aws config
type AWSConf struct {
// AWS profile to use
Profile string `json:"profile"`
// AWS region to use
Region string `json:"region"`
// S3 bucket name
S3Bucket string `json:"s3Bucket"`
// /bucket/path/to/results
S3ResultsDir string `json:"s3ResultsDir"`
// The Server-side encryption algorithm used when storing the reports in S3 (e.g., AES256, aws:kms).
S3ServerSideEncryption string `json:"s3ServerSideEncryption"`
}
// AzureConf is azure config
type AzureConf struct {
// Azure account name to use. AZURE_STORAGE_ACCOUNT environment variable is used if not specified
AccountName string `json:"accountName"`
// Azure account key to use. AZURE_STORAGE_ACCESS_KEY environment variable is used if not specified
AccountKey string `json:"-"`
// Azure storage container name
ContainerName string `json:"containerName"`
}
// WpScanConf is wpscan.com config
type WpScanConf struct {
Token string `toml:"token,omitempty" json:"-"`
@@ -345,14 +219,15 @@ type WpScanConf struct {
// ServerInfo has SSH Info, additional CPE packages to scan.
type ServerInfo struct {
BaseName string `toml:"-" json:"-"`
ServerName string `toml:"-" json:"serverName,omitempty"`
User string `toml:"user,omitempty" json:"user,omitempty"`
Host string `toml:"host,omitempty" json:"host,omitempty"`
IgnoreIPAddresses []string `toml:"ignoreIPAddresses,omitempty" json:"ignoreIPAddresses,omitempty"`
JumpServer []string `toml:"jumpServer,omitempty" json:"jumpServer,omitempty"`
Port string `toml:"port,omitempty" json:"port,omitempty"`
SSHConfigPath string `toml:"sshConfigPath,omitempty" json:"sshConfigPath,omitempty"`
KeyPath string `toml:"keyPath,omitempty" json:"keyPath,omitempty"`
KeyPassword string `json:"-" toml:"-"`
CpeNames []string `toml:"cpeNames,omitempty" json:"cpeNames,omitempty"`
ScanMode []string `toml:"scanMode,omitempty" json:"scanMode,omitempty"`
ScanModules []string `toml:"scanModules,omitempty" json:"scanModules,omitempty"`
@@ -367,16 +242,20 @@ type ServerInfo struct {
GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, RHEL, Amazon
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, Alma, Rocky, RHEL, Amazon
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
FindLockDirs []string `toml:"findLockDirs,omitempty" json:"findLockDirs,omitempty"`
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
IPSIdentifiers map[IPS]string `toml:"-" json:"ipsIdentifiers,omitempty"`
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
PortScan *PortScanConf `toml:"portscan,omitempty" json:"portscan,omitempty"`
Windows *WindowsConf `toml:"windows,omitempty" json:"windows,omitempty"`
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`
// internal use
LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
@@ -399,6 +278,7 @@ type WordPressConf struct {
OSUser string `toml:"osUser,omitempty" json:"osUser,omitempty"`
DocRoot string `toml:"docRoot,omitempty" json:"docRoot,omitempty"`
CmdPath string `toml:"cmdPath,omitempty" json:"cmdPath,omitempty"`
NoSudo bool `toml:"noSudo,omitempty" json:"noSudo,omitempty"`
}
// IsZero return whether this struct is not specified in config.toml
@@ -408,7 +288,8 @@ func (cnf WordPressConf) IsZero() bool {
// GitHubConf is used for GitHub Security Alerts
type GitHubConf struct {
Token string `json:"-"`
Token string `json:"-"`
IgnoreGitHubDismissed bool `json:"ignoreGitHubDismissed,omitempty"`
}
// GetServerName returns ServerName if this serverInfo is about host.
@@ -432,14 +313,24 @@ func (l Distro) String() string {
// MajorVersion returns Major version
func (l Distro) MajorVersion() (int, error) {
if l.Family == Amazon {
if isAmazonLinux1(l.Release) {
return 1, nil
switch l.Family {
case constant.Amazon:
return strconv.Atoi(getAmazonLinuxVersion(l.Release))
case constant.CentOS:
if 0 < len(l.Release) {
return strconv.Atoi(strings.Split(strings.TrimPrefix(l.Release, "stream"), ".")[0])
}
case constant.OpenSUSE:
if l.Release != "" {
if l.Release == "tumbleweed" {
return 0, nil
}
return strconv.Atoi(strings.Split(l.Release, ".")[0])
}
default:
if 0 < len(l.Release) {
return strconv.Atoi(strings.Split(l.Release, ".")[0])
}
return 2, nil
}
if 0 < len(l.Release) {
return strconv.Atoi(strings.Split(l.Release, ".")[0])
}
return 0, xerrors.New("Release is empty")
}
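For illustration, a minimal self-contained sketch of the release-string parsing that MajorVersion applies above (the Amazon branch, which delegates to getAmazonLinuxVersion, is omitted here, and family names are plain strings rather than the constant package values):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// major mirrors the parsing rules shown above: CentOS strips a "stream" prefix,
// openSUSE "tumbleweed" has no numeric major version, and everything else takes
// the part before the first dot.
func major(family, release string) (int, error) {
	switch family {
	case "centos":
		return strconv.Atoi(strings.Split(strings.TrimPrefix(release, "stream"), ".")[0])
	case "opensuse":
		if release == "tumbleweed" {
			return 0, nil
		}
		return strconv.Atoi(strings.Split(release, ".")[0])
	default:
		return strconv.Atoi(strings.Split(release, ".")[0])
	}
}

func main() {
	for _, in := range [][2]string{{"centos", "stream9"}, {"opensuse", "tumbleweed"}, {"debian", "11.7"}} {
		v, err := major(in[0], in[1])
		fmt.Println(in[0], in[1], "=>", v, err)
	}
}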
@@ -460,8 +351,3 @@ type Container struct {
Name string
Image string
}
// VulnSrcConf is an interface of vulnsrc
type VulnSrcConf interface {
CheckHTTPHealth() error
}


@@ -2,6 +2,8 @@ package config
import (
"testing"
. "github.com/future-architect/vuls/constant"
)
func TestSyslogConfValidate(t *testing.T) {
@@ -55,7 +57,7 @@ func TestSyslogConfValidate(t *testing.T) {
}
for i, tt := range tests {
Conf.ToSyslog = true
tt.conf.Enabled = true
errs := tt.conf.Validate()
if len(errs) != tt.expectedErrLength {
t.Errorf("test: %d, expected %d, actual %d", i, tt.expectedErrLength, len(errs))
@@ -68,6 +70,13 @@ func TestDistro_MajorVersion(t *testing.T) {
in Distro
out int
}{
{
in: Distro{
Family: Amazon,
Release: "2022 (Amazon Linux)",
},
out: 2022,
},
{
in: Distro{
Family: Amazon,

config/config_windows.go Normal file (351 lines)

@@ -0,0 +1,351 @@
//go:build windows
package config
import (
"fmt"
"os"
"strconv"
"strings"
"github.com/asaskevich/govalidator"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/logging"
)
// Version of Vuls
var Version = "`make build` or `make install` will show the version"
// Revision of Git
var Revision string
// Conf has Configuration
var Conf Config
// Config is struct of Configuration
type Config struct {
logging.LogOpts
// scan, report
HTTPProxy string `valid:"url" json:"httpProxy,omitempty"`
ResultsDir string `json:"resultsDir,omitempty"`
Pipe bool `json:"pipe,omitempty"`
Default ServerInfo `json:"default,omitempty"`
Servers map[string]ServerInfo `json:"servers,omitempty"`
ScanOpts
// report
CveDict GoCveDictConf `json:"cveDict,omitempty"`
OvalDict GovalDictConf `json:"ovalDict,omitempty"`
Gost GostConf `json:"gost,omitempty"`
Exploit ExploitConf `json:"exploit,omitempty"`
Metasploit MetasploitConf `json:"metasploit,omitempty"`
KEVuln KEVulnConf `json:"kevuln,omitempty"`
Cti CtiConf `json:"cti,omitempty"`
Slack SlackConf `json:"-"`
EMail SMTPConf `json:"-"`
HTTP HTTPConf `json:"-"`
AWS AWSConf `json:"-"`
Azure AzureConf `json:"-"`
ChatWork ChatWorkConf `json:"-"`
GoogleChat GoogleChatConf `json:"-"`
Telegram TelegramConf `json:"-"`
WpScan WpScanConf `json:"-"`
Saas SaasConf `json:"-"`
ReportOpts
}
// ReportConf is an interface to Validate Report Config
type ReportConf interface {
Validate() []error
}
// ScanOpts is options for scan
type ScanOpts struct {
Vvv bool `json:"vvv,omitempty"`
}
// ReportOpts is options for report
type ReportOpts struct {
CvssScoreOver float64 `json:"cvssScoreOver,omitempty"`
ConfidenceScoreOver int `json:"confidenceScoreOver,omitempty"`
TrivyCacheDBDir string `json:"trivyCacheDBDir,omitempty"`
NoProgress bool `json:"noProgress,omitempty"`
RefreshCve bool `json:"refreshCve,omitempty"`
IgnoreUnfixed bool `json:"ignoreUnfixed,omitempty"`
IgnoreUnscoredCves bool `json:"ignoreUnscoredCves,omitempty"`
DiffPlus bool `json:"diffPlus,omitempty"`
DiffMinus bool `json:"diffMinus,omitempty"`
Diff bool `json:"diff,omitempty"`
Lang string `json:"lang,omitempty"`
}
// ValidateOnConfigtest validates
func (c Config) ValidateOnConfigtest() bool {
errs := c.checkSSHKeyExist()
if _, err := govalidator.ValidateStruct(c); err != nil {
errs = append(errs, err)
}
for _, err := range errs {
logging.Log.Error(err)
}
return len(errs) == 0
}
// ValidateOnScan validates configuration
func (c Config) ValidateOnScan() bool {
errs := c.checkSSHKeyExist()
if len(c.ResultsDir) != 0 {
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
errs = append(errs, xerrors.Errorf(
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
}
}
if _, err := govalidator.ValidateStruct(c); err != nil {
errs = append(errs, err)
}
for _, server := range c.Servers {
if !server.Module.IsScanPort() {
continue
}
if es := server.PortScan.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
if es := server.Windows.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
}
for _, err := range errs {
logging.Log.Error(err)
}
return len(errs) == 0
}
func (c Config) checkSSHKeyExist() (errs []error) {
for serverName, v := range c.Servers {
if v.Type == constant.ServerTypePseudo {
continue
}
if v.KeyPath != "" {
if _, err := os.Stat(v.KeyPath); err != nil {
errs = append(errs, xerrors.Errorf(
"%s is invalid. keypath: %s not exists", serverName, v.KeyPath))
}
}
}
return errs
}
// ValidateOnReport validates configuration
func (c *Config) ValidateOnReport() bool {
errs := []error{}
if len(c.ResultsDir) != 0 {
if ok, _ := govalidator.IsFilePath(c.ResultsDir); !ok {
errs = append(errs, xerrors.Errorf(
"JSON base directory must be a *Absolute* file path. -results-dir: %s", c.ResultsDir))
}
}
_, err := govalidator.ValidateStruct(c)
if err != nil {
errs = append(errs, err)
}
for _, rc := range []ReportConf{
&c.EMail,
&c.Slack,
&c.ChatWork,
&c.GoogleChat,
&c.Telegram,
&c.HTTP,
&c.AWS,
&c.Azure,
} {
if es := rc.Validate(); 0 < len(es) {
errs = append(errs, es...)
}
}
for _, cnf := range []VulnDictInterface{
&Conf.CveDict,
&Conf.OvalDict,
&Conf.Gost,
&Conf.Exploit,
&Conf.Metasploit,
&Conf.KEVuln,
&Conf.Cti,
} {
if err := cnf.Validate(); err != nil {
errs = append(errs, xerrors.Errorf("Failed to validate %s: %+v", cnf.GetName(), err))
}
if err := cnf.CheckHTTPHealth(); err != nil {
errs = append(errs, xerrors.Errorf("Run %s as server mode before reporting: %+v", cnf.GetName(), err))
}
}
for _, err := range errs {
logging.Log.Error(err)
}
return len(errs) == 0
}
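The two loops above replace per-notifier if-blocks: anything that satisfies ReportConf can be validated uniformly. A hypothetical standalone sketch of that pattern (dummyConf is invented for illustration and is not part of the repository):

package main

import (
	"errors"
	"fmt"
)

// ReportConf mirrors the interface defined above.
type ReportConf interface {
	Validate() []error
}

// dummyConf is a stand-in for a notifier config such as SlackConf or HTTPConf.
type dummyConf struct{ url string }

func (c *dummyConf) Validate() []error {
	if c.url == "" {
		return []error{errors.New("url must not be empty")}
	}
	return nil
}

func main() {
	var errs []error
	for _, rc := range []ReportConf{&dummyConf{}, &dummyConf{url: "http://example.com"}} {
		if es := rc.Validate(); 0 < len(es) {
			errs = append(errs, es...)
		}
	}
	fmt.Println(len(errs), "validation error(s)") // expected: 1
}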
// ValidateOnSaaS validates configuration
func (c Config) ValidateOnSaaS() bool {
saaserrs := c.Saas.Validate()
for _, err := range saaserrs {
logging.Log.Error("Failed to validate SaaS conf: %+w", err)
}
return len(saaserrs) == 0
}
// WpScanConf is wpscan.com config
type WpScanConf struct {
Token string `toml:"token,omitempty" json:"-"`
DetectInactive bool `toml:"detectInactive,omitempty" json:"detectInactive,omitempty"`
}
// ServerInfo has SSH Info, additional CPE packages to scan.
type ServerInfo struct {
BaseName string `toml:"-" json:"-"`
ServerName string `toml:"-" json:"serverName,omitempty"`
User string `toml:"user,omitempty" json:"user,omitempty"`
Host string `toml:"host,omitempty" json:"host,omitempty"`
IgnoreIPAddresses []string `toml:"ignoreIPAddresses,omitempty" json:"ignoreIPAddresses,omitempty"`
JumpServer []string `toml:"jumpServer,omitempty" json:"jumpServer,omitempty"`
Port string `toml:"port,omitempty" json:"port,omitempty"`
SSHConfigPath string `toml:"sshConfigPath,omitempty" json:"sshConfigPath,omitempty"`
KeyPath string `toml:"keyPath,omitempty" json:"keyPath,omitempty"`
CpeNames []string `toml:"cpeNames,omitempty" json:"cpeNames,omitempty"`
ScanMode []string `toml:"scanMode,omitempty" json:"scanMode,omitempty"`
ScanModules []string `toml:"scanModules,omitempty" json:"scanModules,omitempty"`
OwaspDCXMLPath string `toml:"owaspDCXMLPath,omitempty" json:"owaspDCXMLPath,omitempty"`
ContainersOnly bool `toml:"containersOnly,omitempty" json:"containersOnly,omitempty"`
ContainersIncluded []string `toml:"containersIncluded,omitempty" json:"containersIncluded,omitempty"`
ContainersExcluded []string `toml:"containersExcluded,omitempty" json:"containersExcluded,omitempty"`
ContainerType string `toml:"containerType,omitempty" json:"containerType,omitempty"`
Containers map[string]ContainerSetting `toml:"containers,omitempty" json:"containers,omitempty"`
IgnoreCves []string `toml:"ignoreCves,omitempty" json:"ignoreCves,omitempty"`
IgnorePkgsRegexp []string `toml:"ignorePkgsRegexp,omitempty" json:"ignorePkgsRegexp,omitempty"`
GitHubRepos map[string]GitHubConf `toml:"githubs" json:"githubs,omitempty"` // key: owner/repo
UUIDs map[string]string `toml:"uuids,omitempty" json:"uuids,omitempty"`
Memo string `toml:"memo,omitempty" json:"memo,omitempty"`
Enablerepo []string `toml:"enablerepo,omitempty" json:"enablerepo,omitempty"` // For CentOS, Alma, Rocky, RHEL, Amazon
Optional map[string]interface{} `toml:"optional,omitempty" json:"optional,omitempty"` // Optional key-value set that will be outputted to JSON
Lockfiles []string `toml:"lockfiles,omitempty" json:"lockfiles,omitempty"` // ie) path/to/package-lock.json
FindLock bool `toml:"findLock,omitempty" json:"findLock,omitempty"`
FindLockDirs []string `toml:"findLockDirs,omitempty" json:"findLockDirs,omitempty"`
Type string `toml:"type,omitempty" json:"type,omitempty"` // "pseudo" or ""
IgnoredJSONKeys []string `toml:"ignoredJSONKeys,omitempty" json:"ignoredJSONKeys,omitempty"`
WordPress *WordPressConf `toml:"wordpress,omitempty" json:"wordpress,omitempty"`
PortScan *PortScanConf `toml:"portscan,omitempty" json:"portscan,omitempty"`
Windows *WindowsConf `toml:"windows,omitempty" json:"windows,omitempty"`
IPv4Addrs []string `toml:"-" json:"ipv4Addrs,omitempty"`
IPv6Addrs []string `toml:"-" json:"ipv6Addrs,omitempty"`
IPSIdentifiers map[string]string `toml:"-" json:"ipsIdentifiers,omitempty"`
// internal use
LogMsgAnsiColor string `toml:"-" json:"-"` // DebugLog Color
Container Container `toml:"-" json:"-"`
Distro Distro `toml:"-" json:"-"`
Mode ScanMode `toml:"-" json:"-"`
Module ScanModule `toml:"-" json:"-"`
}
// ContainerSetting is used for loading container setting in config.toml
type ContainerSetting struct {
Cpes []string `json:"cpes,omitempty"`
OwaspDCXMLPath string `json:"owaspDCXMLPath,omitempty"`
IgnorePkgsRegexp []string `json:"ignorePkgsRegexp,omitempty"`
IgnoreCves []string `json:"ignoreCves,omitempty"`
}
// WordPressConf used for WordPress Scanning
type WordPressConf struct {
OSUser string `toml:"osUser,omitempty" json:"osUser,omitempty"`
DocRoot string `toml:"docRoot,omitempty" json:"docRoot,omitempty"`
CmdPath string `toml:"cmdPath,omitempty" json:"cmdPath,omitempty"`
NoSudo bool `toml:"noSudo,omitempty" json:"noSudo,omitempty"`
}
// IsZero return whether this struct is not specified in config.toml
func (cnf WordPressConf) IsZero() bool {
return cnf.OSUser == "" && cnf.DocRoot == "" && cnf.CmdPath == ""
}
// GitHubConf is used for GitHub Security Alerts
type GitHubConf struct {
Token string `json:"-"`
IgnoreGitHubDismissed bool `json:"ignoreGitHubDismissed,omitempty"`
}
// GetServerName returns ServerName if this serverInfo is about host.
// If this serverInfo is about a container, returns containerID@ServerName
func (s ServerInfo) GetServerName() string {
if len(s.Container.ContainerID) == 0 {
return s.ServerName
}
return fmt.Sprintf("%s@%s", s.Container.Name, s.ServerName)
}
// Distro has distribution info
type Distro struct {
Family string
Release string
}
func (l Distro) String() string {
return fmt.Sprintf("%s %s", l.Family, l.Release)
}
// MajorVersion returns Major version
func (l Distro) MajorVersion() (int, error) {
switch l.Family {
case constant.Amazon:
return strconv.Atoi(getAmazonLinuxVersion(l.Release))
case constant.CentOS:
if 0 < len(l.Release) {
return strconv.Atoi(strings.Split(strings.TrimPrefix(l.Release, "stream"), ".")[0])
}
case constant.OpenSUSE:
if l.Release != "" {
if l.Release == "tumbleweed" {
return 0, nil
}
return strconv.Atoi(strings.Split(l.Release, ".")[0])
}
default:
if 0 < len(l.Release) {
return strconv.Atoi(strings.Split(l.Release, ".")[0])
}
}
return 0, xerrors.New("Release is empty")
}
// IsContainer returns whether this ServerInfo is about container
func (s ServerInfo) IsContainer() bool {
return 0 < len(s.Container.ContainerID)
}
// SetContainer set container
func (s *ServerInfo) SetContainer(d Container) {
s.Container = d
}
// Container has Container information.
type Container struct {
ContainerID string
Name string
Image string
}
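A quick sketch of how the container helpers above compose (assuming the package builds as shown in this change; the server and container values are placeholders):

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	s := config.ServerInfo{ServerName: "web01"}
	fmt.Println(s.IsContainer(), s.GetServerName()) // false web01

	s.SetContainer(config.Container{ContainerID: "abc123", Name: "nginx", Image: "nginx:1.25"})
	fmt.Println(s.IsContainer(), s.GetServerName()) // true nginx@web01
}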


@@ -1,73 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// ExploitConf is exploit config
type ExploitConf struct {
// DB type for exploit dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://exploit-dictionary.com:1324 or DB connection string
URL string `json:"-"`
// /path/to/exploit.sqlite3
SQLite3Path string `json:"-"`
}
func (cnf *ExploitConf) setDefault() {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, "go-exploitdb.sqlite3")
}
}
const exploitDBType = "EXPLOITDB_TYPE"
const exploitDBURL = "EXPLOITDB_URL"
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *ExploitConf) Init() {
if os.Getenv(exploitDBType) != "" {
cnf.Type = os.Getenv(exploitDBType)
}
if os.Getenv(exploitDBURL) != "" {
cnf.URL = os.Getenv(exploitDBURL)
}
if os.Getenv(exploitDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(exploitDBPATH)
}
cnf.setDefault()
}
// IsFetchViaHTTP returns whether to fetch via HTTP
func (cnf *ExploitConf) IsFetchViaHTTP() bool {
return Conf.Exploit.Type == "http"
}
// CheckHTTPHealth do health check
func (cnf *ExploitConf) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().Get(url).End()
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to connect to exploit server. url: %s, errs: %s", url, errs)
}
return nil
}


@@ -1,73 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// GoCveDictConf is go-cve-dictionary config
type GoCveDictConf struct {
// DB type of CVE dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://cve-dictionary.com:1323 or DB connection string
URL string `json:"-"`
// /path/to/cve.sqlite3
SQLite3Path string `json:"-"`
}
func (cnf *GoCveDictConf) setDefault() {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, "cve.sqlite3")
}
}
const cveDBType = "CVEDB_TYPE"
const cveDBURL = "CVEDB_URL"
const cveDBPATH = "CVEDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GoCveDictConf) Init() {
if os.Getenv(cveDBType) != "" {
cnf.Type = os.Getenv(cveDBType)
}
if os.Getenv(cveDBURL) != "" {
cnf.URL = os.Getenv(cveDBURL)
}
if os.Getenv(cveDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(cveDBPATH)
}
cnf.setDefault()
}
// IsFetchViaHTTP returns whether to fetch via HTTP
func (cnf *GoCveDictConf) IsFetchViaHTTP() bool {
return Conf.CveDict.Type == "http"
}
// CheckHTTPHealth checks http server status
func (cnf *GoCveDictConf) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().SetDebug(Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to request to CVE server. url: %s, errs: %s",
url, errs)
}
return nil
}

config/googlechatconf.go Normal file (32 lines)

@@ -0,0 +1,32 @@
package config
import (
"github.com/asaskevich/govalidator"
"golang.org/x/xerrors"
)
// GoogleChatConf is GoogleChat config
type GoogleChatConf struct {
WebHookURL string `valid:"url" json:"-" toml:"webHookURL,omitempty"`
SkipIfNoCve bool `valid:"type(bool)" json:"-" toml:"skipIfNoCve"`
ServerNameRegexp string `valid:"type(string)" json:"-" toml:"serverNameRegexp,omitempty"`
Enabled bool `valid:"type(bool)" json:"-" toml:"-"`
}
// Validate validates configuration
func (c *GoogleChatConf) Validate() (errs []error) {
if !c.Enabled {
return
}
if len(c.WebHookURL) == 0 {
errs = append(errs, xerrors.New("googleChatConf.webHookURL must not be empty"))
}
if !govalidator.IsRegex(c.ServerNameRegexp) {
errs = append(errs, xerrors.New("googleChatConf.serverNameRegexp must be regex"))
}
_, err := govalidator.ValidateStruct(c)
if err != nil {
errs = append(errs, err)
}
return
}
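A hedged usage sketch for the new GoogleChat notifier config (the webhook URL and regexp below are placeholders, not real endpoints):

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	cnf := config.GoogleChatConf{
		Enabled:          true,
		WebHookURL:       "https://chat.example.com/webhook", // placeholder
		ServerNameRegexp: "^(web|db)-.+$",                    // only report matching servers
	}
	for _, err := range cnf.Validate() {
		fmt.Println(err)
	}
}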


@@ -1,73 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// GostConf is gost config
type GostConf struct {
// DB type for gost dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://gost-dictionary.com:1324 or DB connection string
URL string `json:"-"`
// /path/to/gost.sqlite3
SQLite3Path string `json:"-"`
}
func (cnf *GostConf) setDefault() {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, "gost.sqlite3")
}
}
const gostDBType = "GOSTDB_TYPE"
const gostDBURL = "GOSTDB_URL"
const gostDBPATH = "GOSTDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GostConf) Init() {
if os.Getenv(gostDBType) != "" {
cnf.Type = os.Getenv(gostDBType)
}
if os.Getenv(gostDBURL) != "" {
cnf.URL = os.Getenv(gostDBURL)
}
if os.Getenv(gostDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(gostDBPATH)
}
cnf.setDefault()
}
// IsFetchViaHTTP returns whether to fetch via HTTP
func (cnf *GostConf) IsFetchViaHTTP() bool {
return Conf.Gost.Type == "http"
}
// CheckHTTPHealth do health check
func (cnf *GostConf) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().Get(url).End()
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to connect to gost server. url: %s, errs: %s", url, errs)
}
return nil
}


@@ -1,75 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// GovalDictConf is goval-dictionary config
type GovalDictConf struct {
// DB type of OVAL dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://goval-dictionary.com:1324 or DB connection string
URL string `json:"-"`
// /path/to/oval.sqlite3
SQLite3Path string `json:"-"`
}
func (cnf *GovalDictConf) setDefault() {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, "oval.sqlite3")
}
}
const govalType = "OVALDB_TYPE"
const govalURL = "OVALDB_URL"
const govalPATH = "OVALDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GovalDictConf) Init() {
if os.Getenv(govalType) != "" {
cnf.Type = os.Getenv(govalType)
}
if os.Getenv(govalURL) != "" {
cnf.URL = os.Getenv(govalURL)
}
if os.Getenv(govalPATH) != "" {
cnf.SQLite3Path = os.Getenv(govalPATH)
}
cnf.setDefault()
}
// IsFetchViaHTTP returns whether to fetch via HTTP
func (cnf *GovalDictConf) IsFetchViaHTTP() bool {
return Conf.OvalDict.Type == "http"
}
// CheckHTTPHealth do health check
func (cnf *GovalDictConf) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().Get(url).End()
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to request to OVAL server. url: %s, errs: %s",
url, errs)
}
return nil
}


@@ -8,31 +8,25 @@ import (
// HTTPConf is HTTP config
type HTTPConf struct {
URL string `valid:"url" json:"-"`
URL string `valid:"url" json:"-"`
Enabled bool `toml:"-" json:"-"`
}
const httpKey = "VULS_HTTP_URL"
// Validate validates configuration
func (c *HTTPConf) Validate() (errs []error) {
if !Conf.ToHTTP {
if !c.Enabled {
return nil
}
// overwrite if env var is not empty
if os.Getenv(httpKey) != "" {
c.URL = os.Getenv(httpKey)
}
if _, err := govalidator.ValidateStruct(c); err != nil {
errs = append(errs, err)
}
return errs
}
const httpKey = "VULS_HTTP_URL"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (c *HTTPConf) Init(toml HTTPConf) {
if os.Getenv(httpKey) != "" {
c.URL = os.Getenv(httpKey)
}
if toml.URL != "" {
c.URL = toml.URL
}
}
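A small sketch of the environment-variable hook shown above: when VULS_HTTP_URL is set, Validate copies it into the config before running struct validation (the endpoint below is a placeholder):

package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	_ = os.Setenv("VULS_HTTP_URL", "http://report.example.com:5515") // placeholder endpoint
	cnf := config.HTTPConf{Enabled: true}
	errs := cnf.Validate()
	fmt.Println(cnf.URL, len(errs)) // expected: the env value and no errors
}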


@@ -1,9 +0,0 @@
package config
// IPS is
type IPS string
const (
// DeepSecurity is
DeepSecurity IPS = "deepsecurity"
)


@@ -7,6 +7,6 @@ type JSONLoader struct {
}
// Load load the configuration JSON file specified by path arg.
func (c JSONLoader) Load(path, sudoPass, keyPass string) (err error) {
func (c JSONLoader) Load(_, _, _ string) (err error) {
return xerrors.New("Not implement yet")
}


@@ -1,10 +1,9 @@
package config
// Load loads configuration
func Load(path, keyPass string) error {
var loader Loader
loader = TOMLLoader{}
return loader.Load(path, keyPass)
func Load(path string) error {
loader := TOMLLoader{}
return loader.Load(path)
}
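With the simplified signature, loading takes only the config path. A hedged usage sketch (the path is hypothetical, and this assumes the TOML loader fills the package-level Conf as before):

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	if err := config.Load("/path/to/config.toml"); err != nil {
		fmt.Println(err)
		return
	}
	for name, srv := range config.Conf.Servers {
		fmt.Println(name, srv.Host, srv.Port)
	}
}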
// Loader is interface of concrete loader


@@ -1,73 +0,0 @@
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// MetasploitConf is metasploit config
type MetasploitConf struct {
// DB type for metasploit dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://metasploit-dictionary.com:1324 or DB connection string
URL string `json:"-"`
// /path/to/metasploit.sqlite3
SQLite3Path string `json:"-"`
}
func (cnf *MetasploitConf) setDefault() {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, "go-msfdb.sqlite3")
}
}
const metasploitDBType = "METASPLOITDB_TYPE"
const metasploitDBURL = "METASPLOITDB_URL"
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *MetasploitConf) Init() {
if os.Getenv(metasploitDBType) != "" {
cnf.Type = os.Getenv(metasploitDBType)
}
if os.Getenv(metasploitDBURL) != "" {
cnf.URL = os.Getenv(metasploitDBURL)
}
if os.Getenv(metasploitDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
}
cnf.setDefault()
}
// IsFetchViaHTTP returns whether to fetch via HTTP
func (cnf *MetasploitConf) IsFetchViaHTTP() bool {
return Conf.Metasploit.Type == "http"
}
// CheckHTTPHealth do health check
func (cnf *MetasploitConf) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().Get(url).End()
// resp, _, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to connect to metasploit server. url: %s, errs: %s", url, errs)
}
return nil
}


@@ -4,59 +4,8 @@ import (
"fmt"
"strings"
"time"
)
const (
// RedHat is
RedHat = "redhat"
// Debian is
Debian = "debian"
// Ubuntu is
Ubuntu = "ubuntu"
// CentOS is
CentOS = "centos"
// Fedora is
// Fedora = "fedora"
// Amazon is
Amazon = "amazon"
// Oracle is
Oracle = "oracle"
// FreeBSD is
FreeBSD = "freebsd"
// Raspbian is
Raspbian = "raspbian"
// Windows is
Windows = "windows"
// OpenSUSE is
OpenSUSE = "opensuse"
// OpenSUSELeap is
OpenSUSELeap = "opensuse.leap"
// SUSEEnterpriseServer is
SUSEEnterpriseServer = "suse.linux.enterprise.server"
// SUSEEnterpriseDesktop is
SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"
// SUSEOpenstackCloud is
SUSEOpenstackCloud = "suse.openstack.cloud"
// Alpine is
Alpine = "alpine"
// ServerTypePseudo is used for ServerInfo.Type, r.Family
ServerTypePseudo = "pseudo"
"github.com/future-architect/vuls/constant"
)
// EOL has End-of-Life information
@@ -89,16 +38,17 @@ func (e EOL) IsExtendedSuppportEnded(now time.Time) bool {
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/redhat/redhat.go#L20
func GetEOL(family, release string) (eol EOL, found bool) {
switch family {
case Amazon:
rel := "2"
if isAmazonLinux1(release) {
rel = "1"
}
case constant.Amazon:
eol, found = map[string]EOL{
"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
"2": {},
}[rel]
case RedHat:
"1": {StandardSupportUntil: time.Date(2023, 6, 30, 23, 59, 59, 0, time.UTC)},
"2": {StandardSupportUntil: time.Date(2025, 6, 30, 23, 59, 59, 0, time.UTC)},
"2022": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
"2023": {StandardSupportUntil: time.Date(2027, 6, 30, 23, 59, 59, 0, time.UTC)},
"2025": {StandardSupportUntil: time.Date(2029, 6, 30, 23, 59, 59, 0, time.UTC)},
"2027": {StandardSupportUntil: time.Date(2031, 6, 30, 23, 59, 59, 0, time.UTC)},
"2029": {StandardSupportUntil: time.Date(2033, 6, 30, 23, 59, 59, 0, time.UTC)},
}[getAmazonLinuxVersion(release)]
case constant.RedHat:
// https://access.redhat.com/support/policy/updates/errata
eol, found = map[string]EOL{
"3": {Ended: true},
@@ -110,23 +60,40 @@ func GetEOL(family, release string) (eol EOL, found bool) {
},
"7": {
StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC),
},
"8": {
StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2031, 5, 31, 23, 59, 59, 0, time.UTC),
},
"9": {
StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2034, 5, 31, 23, 59, 59, 0, time.UTC),
},
}[major(release)]
case CentOS:
case constant.CentOS:
// https://en.wikipedia.org/wiki/CentOS#End-of-support_schedule
// TODO Stream
eol, found = map[string]EOL{
"3": {Ended: true},
"4": {Ended: true},
"5": {Ended: true},
"6": {Ended: true},
"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
"3": {Ended: true},
"4": {Ended: true},
"5": {Ended: true},
"6": {Ended: true},
"7": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"8": {StandardSupportUntil: time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC)},
"stream8": {StandardSupportUntil: time.Date(2024, 5, 31, 23, 59, 59, 0, time.UTC)},
"stream9": {StandardSupportUntil: time.Date(2027, 5, 31, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case Oracle:
case constant.Alma:
eol, found = map[string]EOL{
"8": {StandardSupportUntil: time.Date(2029, 12, 31, 23, 59, 59, 0, time.UTC)},
"9": {StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case constant.Rocky:
eol, found = map[string]EOL{
"8": {StandardSupportUntil: time.Date(2029, 5, 31, 23, 59, 59, 0, time.UTC)},
"9": {StandardSupportUntil: time.Date(2032, 5, 31, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case constant.Oracle:
eol, found = map[string]EOL{
// Source:
// https://www.oracle.com/a/ocom/docs/elsp-lifetime-069338.pdf
@@ -136,16 +103,22 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"5": {Ended: true},
"6": {
StandardSupportUntil: time.Date(2021, 3, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2024, 3, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2024, 6, 1, 23, 59, 59, 0, time.UTC),
},
"7": {
StandardSupportUntil: time.Date(2024, 7, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2026, 6, 1, 23, 59, 59, 0, time.UTC),
},
"8": {
StandardSupportUntil: time.Date(2029, 7, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2031, 7, 1, 23, 59, 59, 0, time.UTC),
},
"9": {
StandardSupportUntil: time.Date(2032, 6, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2034, 6, 1, 23, 59, 59, 0, time.UTC),
},
}[major(release)]
case Debian:
case constant.Debian:
eol, found = map[string]EOL{
// https://wiki.debian.org/LTS
"6": {Ended: true},
@@ -153,25 +126,46 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"8": {Ended: true},
"9": {StandardSupportUntil: time.Date(2022, 6, 30, 23, 59, 59, 0, time.UTC)},
"10": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"11": {StandardSupportUntil: time.Date(2026, 6, 30, 23, 59, 59, 0, time.UTC)},
"12": {StandardSupportUntil: time.Date(2028, 6, 30, 23, 59, 59, 0, time.UTC)},
// "13": {StandardSupportUntil: time.Date(2030, 6, 30, 23, 59, 59, 0, time.UTC)},
// "14": {StandardSupportUntil: time.Date(2032, 6, 30, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case Raspbian:
case constant.Raspbian:
// Not found
eol, found = map[string]EOL{}[major(release)]
case Ubuntu:
case constant.Ubuntu:
// https://wiki.ubuntu.com/Releases
eol, found = map[string]EOL{
"14.10": {Ended: true},
"6.06": {Ended: true},
"6.10": {Ended: true},
"7.04": {Ended: true},
"7.10": {Ended: true},
"8.04": {Ended: true},
"8.10": {Ended: true},
"9.04": {Ended: true},
"9.10": {Ended: true},
"10.04": {Ended: true},
"10.10": {Ended: true},
"11.04": {Ended: true},
"11.10": {Ended: true},
"12.04": {Ended: true},
"12.10": {Ended: true},
"13.04": {Ended: true},
"13.10": {Ended: true},
"14.04": {
ExtendedSupportUntil: time.Date(2022, 4, 1, 23, 59, 59, 0, time.UTC),
},
"14.10": {Ended: true},
"15.04": {Ended: true},
"16.10": {Ended: true},
"17.04": {Ended: true},
"17.10": {Ended: true},
"15.10": {Ended: true},
"16.04": {
StandardSupportUntil: time.Date(2021, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2024, 4, 1, 23, 59, 59, 0, time.UTC),
},
"16.10": {Ended: true},
"17.04": {Ended: true},
"17.10": {Ended: true},
"18.04": {
StandardSupportUntil: time.Date(2023, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2028, 4, 1, 23, 59, 59, 0, time.UTC),
@@ -181,19 +175,99 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"19.10": {Ended: true},
"20.04": {
StandardSupportUntil: time.Date(2025, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2030, 4, 1, 23, 59, 59, 0, time.UTC),
},
"20.10": {
StandardSupportUntil: time.Date(2021, 7, 22, 23, 59, 59, 0, time.UTC),
},
"21.04": {
StandardSupportUntil: time.Date(2022, 1, 1, 23, 59, 59, 0, time.UTC),
StandardSupportUntil: time.Date(2022, 1, 20, 23, 59, 59, 0, time.UTC),
},
"21.10": {
StandardSupportUntil: time.Date(2022, 7, 1, 23, 59, 59, 0, time.UTC),
StandardSupportUntil: time.Date(2022, 7, 14, 23, 59, 59, 0, time.UTC),
},
"22.04": {
StandardSupportUntil: time.Date(2027, 4, 1, 23, 59, 59, 0, time.UTC),
ExtendedSupportUntil: time.Date(2032, 4, 1, 23, 59, 59, 0, time.UTC),
},
"22.10": {
StandardSupportUntil: time.Date(2023, 7, 20, 23, 59, 59, 0, time.UTC),
},
"23.04": {
StandardSupportUntil: time.Date(2024, 1, 31, 23, 59, 59, 0, time.UTC),
},
}[release]
case SUSEEnterpriseServer:
//TODO
case Alpine:
case constant.OpenSUSE:
// https://en.opensuse.org/Lifetime
eol, found = map[string]EOL{
"10.2": {Ended: true},
"10.3": {Ended: true},
"11.0": {Ended: true},
"11.1": {Ended: true},
"11.2": {Ended: true},
"11.3": {Ended: true},
"11.4": {Ended: true},
"12.1": {Ended: true},
"12.2": {Ended: true},
"12.3": {Ended: true},
"13.1": {Ended: true},
"13.2": {Ended: true},
"tumbleweed": {},
}[release]
case constant.OpenSUSELeap:
// https://en.opensuse.org/Lifetime
eol, found = map[string]EOL{
"42.1": {Ended: true},
"42.2": {Ended: true},
"42.3": {Ended: true},
"15.0": {Ended: true},
"15.1": {Ended: true},
"15.2": {Ended: true},
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
}[release]
case constant.SUSEEnterpriseServer:
// https://www.suse.com/lifecycle
eol, found = map[string]EOL{
"11": {Ended: true},
"11.1": {Ended: true},
"11.2": {Ended: true},
"11.3": {Ended: true},
"11.4": {Ended: true},
"12": {Ended: true},
"12.1": {Ended: true},
"12.2": {Ended: true},
"12.3": {Ended: true},
"12.4": {Ended: true},
"12.5": {StandardSupportUntil: time.Date(2024, 10, 31, 23, 59, 59, 0, time.UTC)},
"15": {Ended: true},
"15.1": {Ended: true},
"15.2": {Ended: true},
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
}[release]
case constant.SUSEEnterpriseDesktop:
// https://www.suse.com/lifecycle
eol, found = map[string]EOL{
"11": {Ended: true},
"11.1": {Ended: true},
"11.2": {Ended: true},
"11.3": {Ended: true},
"11.4": {Ended: true},
"12": {Ended: true},
"12.1": {Ended: true},
"12.2": {Ended: true},
"12.3": {Ended: true},
"12.4": {Ended: true},
"15": {Ended: true},
"15.1": {Ended: true},
"15.2": {Ended: true},
"15.3": {StandardSupportUntil: time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC)},
"15.4": {StandardSupportUntil: time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC)},
}[release]
case constant.Alpine:
// https://github.com/aquasecurity/trivy/blob/master/pkg/detector/ospkg/alpine/alpine.go#L19
// https://wiki.alpinelinux.org/wiki/Alpine_Linux:Releases
// https://alpinelinux.org/releases/
eol, found = map[string]EOL{
"2.0": {Ended: true},
"2.1": {Ended: true},
@@ -217,8 +291,12 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"3.11": {StandardSupportUntil: time.Date(2021, 11, 1, 23, 59, 59, 0, time.UTC)},
"3.12": {StandardSupportUntil: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC)},
"3.13": {StandardSupportUntil: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC)},
"3.14": {StandardSupportUntil: time.Date(2023, 5, 1, 23, 59, 59, 0, time.UTC)},
"3.15": {StandardSupportUntil: time.Date(2023, 11, 1, 23, 59, 59, 0, time.UTC)},
"3.16": {StandardSupportUntil: time.Date(2024, 5, 23, 23, 59, 59, 0, time.UTC)},
"3.17": {StandardSupportUntil: time.Date(2024, 11, 22, 23, 59, 59, 0, time.UTC)},
}[majorDotMinor(release)]
case FreeBSD:
case constant.FreeBSD:
// https://www.freebsd.org/security/
eol, found = map[string]EOL{
"7": {Ended: true},
@@ -226,8 +304,103 @@ func GetEOL(family, release string) (eol EOL, found bool) {
"9": {Ended: true},
"10": {Ended: true},
"11": {StandardSupportUntil: time.Date(2021, 9, 30, 23, 59, 59, 0, time.UTC)},
"12": {StandardSupportUntil: time.Date(2024, 6, 30, 23, 59, 59, 0, time.UTC)},
"12": {StandardSupportUntil: time.Date(2023, 12, 31, 23, 59, 59, 0, time.UTC)},
"13": {StandardSupportUntil: time.Date(2026, 1, 31, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case constant.Fedora:
// https://docs.fedoraproject.org/en-US/releases/eol/
// https://endoflife.date/fedora
eol, found = map[string]EOL{
"32": {StandardSupportUntil: time.Date(2021, 5, 24, 23, 59, 59, 0, time.UTC)},
"33": {StandardSupportUntil: time.Date(2021, 11, 29, 23, 59, 59, 0, time.UTC)},
"34": {StandardSupportUntil: time.Date(2022, 6, 6, 23, 59, 59, 0, time.UTC)},
"35": {StandardSupportUntil: time.Date(2022, 12, 12, 23, 59, 59, 0, time.UTC)},
"36": {StandardSupportUntil: time.Date(2023, 5, 16, 23, 59, 59, 0, time.UTC)},
"37": {StandardSupportUntil: time.Date(2023, 12, 15, 23, 59, 59, 0, time.UTC)},
"38": {StandardSupportUntil: time.Date(2024, 5, 14, 23, 59, 59, 0, time.UTC)},
}[major(release)]
case constant.Windows:
// https://learn.microsoft.com/ja-jp/lifecycle/products/?products=windows
lhs, rhs, _ := strings.Cut(strings.TrimSuffix(release, "(Server Core installation)"), "for")
switch strings.TrimSpace(lhs) {
case "Windows 7":
eol, found = EOL{StandardSupportUntil: time.Date(2013, 4, 9, 23, 59, 59, 0, time.UTC)}, true
if strings.Contains(rhs, "Service Pack 1") {
eol, found = EOL{StandardSupportUntil: time.Date(2020, 1, 14, 23, 59, 59, 0, time.UTC)}, true
}
case "Windows 8":
eol, found = EOL{StandardSupportUntil: time.Date(2016, 1, 12, 23, 59, 59, 0, time.UTC)}, true
case "Windows 8.1":
eol, found = EOL{StandardSupportUntil: time.Date(2023, 1, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10":
eol, found = EOL{StandardSupportUntil: time.Date(2017, 5, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1511":
eol, found = EOL{StandardSupportUntil: time.Date(2017, 10, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1607":
eol, found = EOL{StandardSupportUntil: time.Date(2018, 4, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1703":
eol, found = EOL{StandardSupportUntil: time.Date(2018, 10, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1709":
eol, found = EOL{StandardSupportUntil: time.Date(2019, 4, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1803":
eol, found = EOL{StandardSupportUntil: time.Date(2019, 11, 12, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1809":
eol, found = EOL{StandardSupportUntil: time.Date(2020, 11, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1903":
eol, found = EOL{StandardSupportUntil: time.Date(2020, 12, 8, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 1909":
eol, found = EOL{StandardSupportUntil: time.Date(2021, 5, 11, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 2004":
eol, found = EOL{StandardSupportUntil: time.Date(2021, 12, 14, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 20H2":
eol, found = EOL{StandardSupportUntil: time.Date(2022, 5, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 21H1":
eol, found = EOL{StandardSupportUntil: time.Date(2022, 12, 13, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 21H2":
eol, found = EOL{StandardSupportUntil: time.Date(2023, 6, 13, 23, 59, 59, 0, time.UTC)}, true
case "Windows 10 Version 22H2":
eol, found = EOL{StandardSupportUntil: time.Date(2024, 5, 14, 23, 59, 59, 0, time.UTC)}, true
case "Windows 11 Version 21H2":
eol, found = EOL{StandardSupportUntil: time.Date(2024, 10, 8, 23, 59, 59, 0, time.UTC)}, true
case "Windows 11 Version 22H2":
eol, found = EOL{StandardSupportUntil: time.Date(2025, 10, 14, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server 2008":
eol, found = EOL{StandardSupportUntil: time.Date(2011, 7, 12, 23, 59, 59, 0, time.UTC)}, true
if strings.Contains(rhs, "Service Pack 2") {
eol, found = EOL{StandardSupportUntil: time.Date(2020, 1, 14, 23, 59, 59, 0, time.UTC)}, true
}
case "Windows Server 2008 R2":
eol, found = EOL{StandardSupportUntil: time.Date(2013, 4, 9, 23, 59, 59, 0, time.UTC)}, true
if strings.Contains(rhs, "Service Pack 1") {
eol, found = EOL{StandardSupportUntil: time.Date(2020, 1, 14, 23, 59, 59, 0, time.UTC)}, true
}
case "Windows Server 2012":
eol, found = EOL{StandardSupportUntil: time.Date(2023, 10, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server 2012 R2":
eol, found = EOL{StandardSupportUntil: time.Date(2023, 10, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server 2016":
eol, found = EOL{StandardSupportUntil: time.Date(2027, 1, 12, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 1709":
eol, found = EOL{StandardSupportUntil: time.Date(2019, 4, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 1803":
eol, found = EOL{StandardSupportUntil: time.Date(2019, 11, 12, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 1809":
eol, found = EOL{StandardSupportUntil: time.Date(2020, 11, 10, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server 2019":
eol, found = EOL{StandardSupportUntil: time.Date(2029, 1, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 1903":
eol, found = EOL{StandardSupportUntil: time.Date(2020, 12, 8, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 1909":
eol, found = EOL{StandardSupportUntil: time.Date(2021, 5, 11, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 2004":
eol, found = EOL{StandardSupportUntil: time.Date(2021, 12, 14, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server, Version 20H2":
eol, found = EOL{StandardSupportUntil: time.Date(2022, 8, 9, 23, 59, 59, 0, time.UTC)}, true
case "Windows Server 2022":
eol, found = EOL{StandardSupportUntil: time.Date(2031, 10, 14, 23, 59, 59, 0, time.UTC)}, true
default:
}
}
return
}
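A short usage sketch for GetEOL (assuming the package builds as in this change; the release string and date are placeholders chosen for illustration):

package main

import (
	"fmt"
	"time"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/constant"
)

func main() {
	eol, found := config.GetEOL(constant.Amazon, "2023")
	if !found {
		fmt.Println("no EOL data registered for this release")
		return
	}
	now := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC)
	fmt.Println("standard support ended:", eol.IsStandardSupportEnded(now))
	fmt.Println("extended support ended:", eol.IsExtendedSuppportEnded(now))
}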
@@ -244,6 +417,26 @@ func majorDotMinor(osVer string) (majorDotMinor string) {
return fmt.Sprintf("%s.%s", ss[0], ss[1])
}
func isAmazonLinux1(osRelease string) bool {
return len(strings.Fields(osRelease)) == 1
func getAmazonLinuxVersion(osRelease string) string {
switch s := strings.Fields(osRelease)[0]; s {
case "1":
return "1"
case "2":
return "2"
case "2022":
return "2022"
case "2023":
return "2023"
case "2025":
return "2025"
case "2027":
return "2027"
case "2029":
return "2029"
default:
if _, err := time.Parse("2006.01", s); err == nil {
return "1"
}
return "unknown"
}
}
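As a rough standalone sketch of how getAmazonLinuxVersion normalizes the os-release string into the keys used by the Amazon EOL map above (date-formatted releases such as "2017.09" are treated as Amazon Linux 1; the function below is a hypothetical copy for illustration):

package main

import (
	"fmt"
	"strings"
	"time"
)

// amazonVersion reproduces the mapping shown above.
func amazonVersion(osRelease string) string {
	switch s := strings.Fields(osRelease)[0]; s {
	case "1", "2", "2022", "2023", "2025", "2027", "2029":
		return s
	default:
		if _, err := time.Parse("2006.01", s); err == nil {
			return "1" // Amazon Linux 1 releases look like "2017.09"
		}
		return "unknown"
	}
}

func main() {
	for _, r := range []string{"2", "2022 (Amazon Linux)", "2017.09", "2031"} {
		fmt.Printf("%q -> %s\n", r, amazonVersion(r))
	}
}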


@@ -3,6 +3,8 @@ package config
import (
"testing"
"time"
. "github.com/future-architect/vuls/constant"
)
func TestEOL_IsStandardSupportEnded(t *testing.T) {
@@ -43,7 +45,39 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
extEnded: false,
found: true,
},
{
name: "amazon linux 2022 supported",
fields: fields{family: Amazon, release: "2022 (Amazon Linux)"},
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "amazon linux 2023 supported",
fields: fields{family: Amazon, release: "2023"},
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "amazon linux 2031 not found",
fields: fields{family: Amazon, release: "2031"},
now: time.Date(2023, 7, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
//RHEL
{
name: "RHEL6 eol",
fields: fields{family: RedHat, release: "6"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: false,
found: true,
},
{
name: "RHEL7 supported",
fields: fields{family: RedHat, release: "7"},
@@ -61,22 +95,30 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "RHEL6 eol",
fields: fields{family: RedHat, release: "6"},
name: "RHEL9 supported",
fields: fields{family: RedHat, release: "9"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "RHEL9 not found",
fields: fields{family: RedHat, release: "9"},
name: "RHEL10 not found",
fields: fields{family: RedHat, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
//CentOS
{
name: "CentOS 6 eol",
fields: fields{family: CentOS, release: "6"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "CentOS 7 supported",
fields: fields{family: CentOS, release: "7"},
@@ -94,22 +136,88 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "CentOS 6 eol",
fields: fields{family: CentOS, release: "6"},
name: "CentOS stream8 supported",
fields: fields{family: CentOS, release: "stream8"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "CentOS 9 not found",
fields: fields{family: CentOS, release: "9"},
name: "CentOS stream9 supported",
fields: fields{family: CentOS, release: "stream9"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "CentOS stream10 Not Found",
fields: fields{family: CentOS, release: "stream10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
// Alma
{
name: "Alma Linux 8 supported",
fields: fields{family: Alma, release: "8"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alma Linux 9 supported",
fields: fields{family: Alma, release: "9"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alma Linux 10 Not Found",
fields: fields{family: Alma, release: "10"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
// Rocky
{
name: "Rocky Linux 8 supported",
fields: fields{family: Rocky, release: "8"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Rocky Linux 9 supported",
fields: fields{family: Rocky, release: "9"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Rocky Linux 10 Not Found",
fields: fields{family: Rocky, release: "10"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
//Oracle
{
name: "Oracle Linux 6 eol",
fields: fields{family: Oracle, release: "6"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Oracle Linux 7 supported",
fields: fields{family: Oracle, release: "7"},
@@ -127,16 +235,16 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "Oracle Linux 6 eol",
fields: fields{family: Oracle, release: "6"},
name: "Oracle Linux 9 supported",
fields: fields{family: Oracle, release: "9"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Oracle Linux 9 not found",
fields: fields{family: Oracle, release: "9"},
name: "Oracle Linux 10 not found",
fields: fields{family: Oracle, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
@@ -144,28 +252,12 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
},
//Ubuntu
{
name: "Ubuntu 18.04 supported",
fields: fields{family: Ubuntu, release: "18.04"},
name: "Ubuntu 5.10 not found",
fields: fields{family: Ubuntu, release: "5.10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
found: false,
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Ubuntu 18.04 ext supported",
fields: fields{family: Ubuntu, release: "18.04"},
now: time.Date(2025, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: false,
found: true,
},
{
name: "Ubuntu 16.04 supported",
fields: fields{family: Ubuntu, release: "18.04"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Ubuntu 14.04 eol",
@@ -184,10 +276,50 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "Ubuntu 12.10 not found",
fields: fields{family: Ubuntu, release: "12.10"},
name: "Ubuntu 16.04 supported",
fields: fields{family: Ubuntu, release: "18.04"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
found: false,
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Ubuntu 18.04 supported",
fields: fields{family: Ubuntu, release: "18.04"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Ubuntu 18.04 ext supported",
fields: fields{family: Ubuntu, release: "18.04"},
now: time.Date(2025, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: false,
found: true,
},
{
name: "Ubuntu 20.04 supported",
fields: fields{family: Ubuntu, release: "20.04"},
now: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 20.04 ext supported",
fields: fields{family: Ubuntu, release: "20.04"},
now: time.Date(2025, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: true,
extEnded: false,
},
{
name: "Ubuntu 20.10 supported",
fields: fields{family: Ubuntu, release: "20.10"},
now: time.Date(2021, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
@@ -199,7 +331,47 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 21.10 supported",
fields: fields{family: Ubuntu, release: "21.10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 22.04 supported",
fields: fields{family: Ubuntu, release: "22.04"},
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 22.10 supported",
fields: fields{family: Ubuntu, release: "22.10"},
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
{
name: "Ubuntu 23.04 supported",
fields: fields{family: Ubuntu, release: "23.04"},
now: time.Date(2023, 3, 16, 23, 59, 59, 0, time.UTC),
found: true,
stdEnded: false,
extEnded: false,
},
//Debian
{
name: "Debian 8 supported",
fields: fields{family: Debian, release: "8"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Debian 9 supported",
fields: fields{family: Debian, release: "9"},
@@ -217,16 +389,24 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "Debian 8 supported",
fields: fields{family: Debian, release: "8"},
name: "Debian 11 supported",
fields: fields{family: Debian, release: "11"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Debian 11 supported",
fields: fields{family: Debian, release: "11"},
name: "Debian 12 supported",
fields: fields{family: Debian, release: "12"},
now: time.Date(2023, 6, 10, 0, 0, 0, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Debian 13 is not supported yet",
fields: fields{family: Debian, release: "13"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
@@ -266,14 +446,54 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "Alpine 3.14 not found",
name: "Alpine 3.14 supported",
fields: fields{family: Alpine, release: "3.14"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
now: time.Date(2022, 5, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alpine 3.15 supported",
fields: fields{family: Alpine, release: "3.15"},
now: time.Date(2022, 11, 1, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alpine 3.16 supported",
fields: fields{family: Alpine, release: "3.16"},
now: time.Date(2024, 5, 23, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alpine 3.17 supported",
fields: fields{family: Alpine, release: "3.17"},
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Alpine 3.18 not found",
fields: fields{family: Alpine, release: "3.18"},
now: time.Date(2022, 1, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
// freebsd
{
name: "freebsd 10 eol",
fields: fields{family: FreeBSD, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "freebsd 11 supported",
fields: fields{family: FreeBSD, release: "11"},
@@ -299,13 +519,150 @@ func TestEOL_IsStandardSupportEnded(t *testing.T) {
found: true,
},
{
name: "freebsd 10 eol",
fields: fields{family: FreeBSD, release: "10"},
now: time.Date(2021, 1, 6, 23, 59, 59, 0, time.UTC),
name: "freebsd 13 supported",
fields: fields{family: FreeBSD, release: "13"},
now: time.Date(2021, 7, 2, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
// Fedora
{
name: "Fedora 32 supported",
fields: fields{family: Fedora, release: "32"},
now: time.Date(2021, 5, 24, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 32 eol since 2021-5-25",
fields: fields{family: Fedora, release: "32"},
now: time.Date(2021, 5, 25, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 33 supported",
fields: fields{family: Fedora, release: "33"},
now: time.Date(2021, 11, 29, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 33 eol since 2021-11-30",
fields: fields{family: Fedora, release: "33"},
now: time.Date(2021, 11, 30, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 34 supported",
fields: fields{family: Fedora, release: "34"},
now: time.Date(2022, 6, 6, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 34 eol since 2022-6-7",
fields: fields{family: Fedora, release: "34"},
now: time.Date(2022, 6, 7, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 35 supported",
fields: fields{family: Fedora, release: "35"},
now: time.Date(2022, 12, 12, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 35 eol since 2022-12-13",
fields: fields{family: Fedora, release: "35"},
now: time.Date(2022, 12, 13, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 36 supported",
fields: fields{family: Fedora, release: "36"},
now: time.Date(2023, 5, 16, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 36 eol since 2023-05-17",
fields: fields{family: Fedora, release: "36"},
now: time.Date(2023, 5, 17, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 37 supported",
fields: fields{family: Fedora, release: "37"},
now: time.Date(2023, 12, 15, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 37 eol since 2023-12-16",
fields: fields{family: Fedora, release: "37"},
now: time.Date(2023, 12, 16, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 38 supported",
fields: fields{family: Fedora, release: "38"},
now: time.Date(2024, 5, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
{
name: "Fedora 38 eol since 2024-05-15",
fields: fields{family: Fedora, release: "38"},
now: time.Date(2024, 5, 15, 0, 0, 0, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Fedora 39 not found",
fields: fields{family: Fedora, release: "39"},
now: time.Date(2024, 5, 14, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: false,
},
{
name: "Windows 10 EOL",
fields: fields{family: Windows, release: "Windows 10 for x64-based Systems"},
now: time.Date(2022, 12, 8, 23, 59, 59, 0, time.UTC),
stdEnded: true,
extEnded: true,
found: true,
},
{
name: "Windows 10 Version 22H2 supported",
fields: fields{family: Windows, release: "Windows 10 Version 22H2 for x64-based Systems"},
now: time.Date(2022, 12, 8, 23, 59, 59, 0, time.UTC),
stdEnded: false,
extEnded: false,
found: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -371,3 +728,58 @@ func Test_majorDotMinor(t *testing.T) {
})
}
}
func Test_getAmazonLinuxVersion(t *testing.T) {
tests := []struct {
release string
want string
}{
{
release: "2017.09",
want: "1",
},
{
release: "2018.03",
want: "1",
},
{
release: "1",
want: "1",
},
{
release: "2",
want: "2",
},
{
release: "2022",
want: "2022",
},
{
release: "2023",
want: "2023",
},
{
release: "2025",
want: "2025",
},
{
release: "2027",
want: "2027",
},
{
release: "2029",
want: "2029",
},
{
release: "2031",
want: "unknown",
},
}
for _, tt := range tests {
t.Run(tt.release, func(t *testing.T) {
if got := getAmazonLinuxVersion(tt.release); got != tt.want {
t.Errorf("getAmazonLinuxVersion() = %v, want %v", got, tt.want)
}
})
}
}

config/portscan.go

@@ -0,0 +1,222 @@
package config
import (
"os"
"os/exec"
"strconv"
"strings"
"github.com/asaskevich/govalidator"
"golang.org/x/xerrors"
)
// PortScanConf is the setting for using an external port scanner
type PortScanConf struct {
IsUseExternalScanner bool `toml:"-" json:"-"`
// Path to external scanner
ScannerBinPath string `toml:"scannerBinPath,omitempty" json:"scannerBinPath,omitempty"`
// set user has privileged
HasPrivileged bool `toml:"hasPrivileged,omitempty" json:"hasPrivileged,omitempty"`
// set the ScanTechniques for ScannerBinPath
ScanTechniques []string `toml:"scanTechniques,omitempty" json:"scanTechniques,omitempty"`
// set the FIREWALL/IDS EVASION AND SPOOFING(Use given port number)
SourcePort string `toml:"sourcePort,omitempty" json:"sourcePort,omitempty"`
}
// ScanTechnique is implemented to represent the supported ScanTechniques in an Enum.
type ScanTechnique int
const (
// NotSupportTechnique is a ScanTechnique that is currently not supported.
NotSupportTechnique ScanTechnique = iota
// TCPSYN is SYN scan
TCPSYN
// TCPConnect is TCP connect scan
TCPConnect
// TCPACK is ACK scan
TCPACK
// TCPWindow is Window scan
TCPWindow
// TCPMaimon is Maimon scan
TCPMaimon
// TCPNull is Null scan
TCPNull
// TCPFIN is FIN scan
TCPFIN
// TCPXmas is Xmas scan
TCPXmas
)
var scanTechniqueMap = map[ScanTechnique]string{
TCPSYN: "sS",
TCPConnect: "sT",
TCPACK: "sA",
TCPWindow: "sW",
TCPMaimon: "sM",
TCPNull: "sN",
TCPFIN: "sF",
TCPXmas: "sX",
}
func (s ScanTechnique) String() string {
switch s {
case TCPSYN:
return "TCPSYN"
case TCPConnect:
return "TCPConnect"
case TCPACK:
return "TCPACK"
case TCPWindow:
return "TCPWindow"
case TCPMaimon:
return "TCPMaimon"
case TCPNull:
return "TCPNull"
case TCPFIN:
return "TCPFIN"
case TCPXmas:
return "TCPXmas"
default:
return "NotSupportTechnique"
}
}
// GetScanTechniques converts ScanTechniques loaded from config.toml to []scanTechniques.
func (c *PortScanConf) GetScanTechniques() []ScanTechnique {
if len(c.ScanTechniques) == 0 {
return []ScanTechnique{}
}
scanTechniques := []ScanTechnique{}
for _, technique := range c.ScanTechniques {
findScanTechniqueFlag := false
for key, value := range scanTechniqueMap {
if strings.EqualFold(value, technique) {
scanTechniques = append(scanTechniques, key)
findScanTechniqueFlag = true
break
}
}
if !findScanTechniqueFlag {
scanTechniques = append(scanTechniques, NotSupportTechnique)
}
}
if len(scanTechniques) == 0 {
return []ScanTechnique{NotSupportTechnique}
}
return scanTechniques
}
// Validate validates configuration
func (c *PortScanConf) Validate() (errs []error) {
if !c.IsUseExternalScanner {
if c.IsZero() {
return
}
errs = append(errs, xerrors.New("To enable the PortScan option, ScannerBinPath must be set."))
}
if _, err := os.Stat(c.ScannerBinPath); err != nil {
errs = append(errs, xerrors.Errorf(
"scanner is not found. ScannerBinPath: %s not exists", c.ScannerBinPath))
}
scanTechniques := c.GetScanTechniques()
for _, scanTechnique := range scanTechniques {
if scanTechnique == NotSupportTechnique {
errs = append(errs, xerrors.New("There is an unsupported option in ScanTechniques."))
}
}
// It does not currently support multiple ScanTechniques.
// But if it supports UDP scanning, it will need to accept multiple ScanTechniques.
if len(scanTechniques) > 1 {
errs = append(errs, xerrors.New("Currently multiple ScanTechniques are not supported."))
}
if c.HasPrivileged {
if os.Geteuid() != 0 {
output, err := exec.Command("getcap", c.ScannerBinPath).Output()
if err != nil {
errs = append(errs, xerrors.Errorf("Failed to check capability of %s. error message: %w", c.ScannerBinPath, err))
} else {
parseOutput := strings.SplitN(string(output), "=", 2)
if len(parseOutput) != 2 {
errs = append(errs, xerrors.Errorf("Failed to parse getcap outputs. please execute this command: `$ getcap %s`. If the following string (`/usr/bin/nmap = ... `) is not displayed, you need to set the capability with the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", c.ScannerBinPath, c.ScannerBinPath))
} else {
parseCapability := strings.Split(strings.TrimSpace(parseOutput[1]), "+")
capabilities := strings.Split(parseCapability[0], ",")
for _, needCap := range []string{"cap_net_bind_service", "cap_net_admin", "cap_net_raw"} {
existCapFlag := false
for _, cap := range capabilities {
if needCap == cap {
existCapFlag = true
break
}
}
if existCapFlag {
continue
}
errs = append(errs, xerrors.Errorf("Not enough capability to execute. needs: ['cap_net_bind_service', 'cap_net_admin', 'cap_net_raw'], actual: %s. To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", capabilities, c.ScannerBinPath))
break
}
if parseCapability[1] != "eip" {
errs = append(errs, xerrors.Errorf("Capability(`cap_net_bind_service,cap_net_admin,cap_net_raw`) must belong to the following capability set(need: eip, actual: %s). To fix this, run the following command. `$ setcap cap_net_raw,cap_net_admin,cap_net_bind_service+eip %s`", parseCapability[1], c.ScannerBinPath))
}
}
}
}
}
if !c.HasPrivileged {
for _, scanTechnique := range scanTechniques {
if scanTechnique != TCPConnect && scanTechnique != NotSupportTechnique {
errs = append(errs, xerrors.New("If not privileged, only TCPConnect Scan(-sT) can be used."))
break
}
}
}
if c.SourcePort != "" {
for _, scanTechnique := range scanTechniques {
if scanTechnique == TCPConnect {
errs = append(errs, xerrors.New("SourcePort Option(-g/--source-port) is incompatible with the default TCPConnect Scan(-sT)."))
break
}
}
portNumber, err := strconv.Atoi(c.SourcePort)
if err != nil {
errs = append(errs, xerrors.Errorf("SourcePort conversion failed. %w", err))
} else {
if portNumber < 0 || 65535 < portNumber {
errs = append(errs, xerrors.Errorf("SourcePort(%s) must be between 0 and 65535.", c.SourcePort))
}
if portNumber == 0 {
errs = append(errs, xerrors.New("SourcePort(0) may not work on all systems."))
}
}
}
_, err := govalidator.ValidateStruct(c)
if err != nil {
errs = append(errs, err)
}
return
}
// IsZero return whether this struct is not specified in config.toml
func (c PortScanConf) IsZero() bool {
return c.ScannerBinPath == "" && !c.HasPrivileged && len(c.ScanTechniques) == 0 && c.SourcePort == ""
}

config/portscan_test.go

@@ -0,0 +1,69 @@
package config
import (
"reflect"
"testing"
)
func TestPortScanConf_getScanTechniques(t *testing.T) {
tests := []struct {
name string
techniques []string
want []ScanTechnique
}{
{
name: "nil",
techniques: []string{},
want: []ScanTechnique{},
},
{
name: "single",
techniques: []string{"sS"},
want: []ScanTechnique{TCPSYN},
},
{
name: "multiple",
techniques: []string{"sS", "sT"},
want: []ScanTechnique{TCPSYN, TCPConnect},
},
{
name: "unknown",
techniques: []string{"sU"},
want: []ScanTechnique{NotSupportTechnique},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := PortScanConf{ScanTechniques: tt.techniques}
if got := c.GetScanTechniques(); !reflect.DeepEqual(got, tt.want) {
t.Errorf("PortScanConf.getScanTechniques() = %v, want %v", got, tt.want)
}
})
}
}
func TestPortScanConf_IsZero(t *testing.T) {
tests := []struct {
name string
conf PortScanConf
want bool
}{
{
name: "not zero",
conf: PortScanConf{ScannerBinPath: "/usr/bin/nmap"},
want: false,
},
{
name: "zero",
conf: PortScanConf{},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.conf.IsZero(); got != tt.want {
t.Errorf("PortScanConf.IsZero() = %v, want %v", got, tt.want)
}
})
}
}


@@ -84,7 +84,7 @@ func (s ScanMode) String() string {
return ss + " mode"
}
func setScanMode(server *ServerInfo, d ServerInfo) error {
func setScanMode(server *ServerInfo) error {
if len(server.ScanMode) == 0 {
server.ScanMode = Conf.Default.ScanMode
}


@@ -16,11 +16,12 @@ type SlackConf struct {
AuthUser string `json:"-" toml:"authUser,omitempty"`
NotifyUsers []string `toml:"notifyUsers,omitempty" json:"-"`
Text string `json:"-"`
Enabled bool `toml:"-" json:"-"`
}
// Validate validates configuration
func (c *SlackConf) Validate() (errs []error) {
if !Conf.ToSlack {
if !c.Enabled {
return
}


@@ -15,6 +15,7 @@ type SMTPConf struct {
To []string `toml:"to,omitempty" json:"-"`
Cc []string `toml:"cc,omitempty" json:"-"`
SubjectPrefix string `toml:"subjectPrefix,omitempty" json:"-"`
Enabled bool `toml:"-" json:"-"`
}
func checkEmails(emails []string) (errs []error) {
@@ -31,10 +32,9 @@ func checkEmails(emails []string) (errs []error) {
// Validate SMTP configuration
func (c *SMTPConf) Validate() (errs []error) {
if !Conf.ToEmail {
if !c.Enabled {
return
}
// Check Emails format
emails := []string{}
emails = append(emails, c.From)
emails = append(emails, c.To...)
@@ -44,10 +44,10 @@ func (c *SMTPConf) Validate() (errs []error) {
errs = append(errs, emailErrs...)
}
if len(c.SMTPAddr) == 0 {
if c.SMTPAddr == "" {
errs = append(errs, xerrors.New("email.smtpAddr must not be empty"))
}
if len(c.SMTPPort) == 0 {
if c.SMTPPort == "" {
errs = append(errs, xerrors.New("email.smtpPort must not be empty"))
}
if len(c.To) == 0 {


@@ -1,3 +1,5 @@
//go:build !windows
package config
import (
@@ -17,11 +19,12 @@ type SyslogConf struct {
Facility string `json:"-"`
Tag string `json:"-"`
Verbose bool `json:"-"`
Enabled bool `toml:"-" json:"-"`
}
// Validate validates configuration
func (c *SyslogConf) Validate() (errs []error) {
if !Conf.ToSyslog {
if !c.Enabled {
return nil
}
// If protocol is empty, it will connect to the local syslog server.


@@ -7,13 +7,14 @@ import (
// TelegramConf is Telegram config
type TelegramConf struct {
Token string `json:"-"`
ChatID string `json:"-"`
Token string `json:"-"`
ChatID string `json:"-"`
Enabled bool `toml:"-" json:"-"`
}
// Validate validates configuration
func (c *TelegramConf) Validate() (errs []error) {
if !Conf.ToTelegram {
if !c.Enabled {
return
}
if len(c.ChatID) == 0 {


@@ -1,12 +1,17 @@
package config
import (
"fmt"
"net"
"regexp"
"strings"
"github.com/BurntSushi/toml"
"github.com/c-robinson/iplib"
"github.com/knqyf263/go-cpe/naming"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
)
// TOMLLoader loads config
@@ -14,32 +19,45 @@ type TOMLLoader struct {
}
// Load load the configuration TOML file specified by path arg.
func (c TOMLLoader) Load(pathToToml, keyPass string) error {
func (c TOMLLoader) Load(pathToToml string) error {
// util.Log.Infof("Loading config: %s", pathToToml)
if _, err := toml.DecodeFile(pathToToml, &Conf); err != nil {
return err
}
if keyPass != "" {
Conf.Default.KeyPassword = keyPass
for _, cnf := range []VulnDictInterface{
&Conf.CveDict,
&Conf.OvalDict,
&Conf.Gost,
&Conf.Exploit,
&Conf.Metasploit,
&Conf.KEVuln,
&Conf.Cti,
} {
cnf.Init()
}
Conf.CveDict.Init()
Conf.OvalDict.Init()
Conf.Gost.Init()
Conf.Exploit.Init()
Conf.Metasploit.Init()
index := 0
servers := map[string]ServerInfo{}
for name, server := range Conf.Servers {
server.ServerName = name
if 0 < len(server.KeyPassword) {
return xerrors.Errorf("[Deprecated] KEYPASSWORD IN CONFIG FILE ARE UNSECURE. REMOVE THEM IMMEDIATELY FOR A SECURITY REASONS. THEY WILL BE REMOVED IN A FUTURE RELEASE: %s", name)
server.BaseName = name
if server.Type != constant.ServerTypePseudo && server.Host == "" {
return xerrors.New("Failed to find hosts. err: server.host is empty")
}
serverHosts, err := hosts(server.Host, server.IgnoreIPAddresses)
if err != nil {
return xerrors.Errorf("Failed to find hosts. err: %w", err)
}
if len(serverHosts) == 0 {
return xerrors.New("Failed to find hosts. err: zero enumerated hosts")
}
if err := setDefaultIfEmpty(&server, Conf.Default); err != nil {
if err := setDefaultIfEmpty(&server); err != nil {
return xerrors.Errorf("Failed to set default value to config. server: %s, err: %w", name, err)
}
if err := setScanMode(&server, Conf.Default); err != nil {
if err := setScanMode(&server); err != nil {
return xerrors.Errorf("Failed to set ScanMode: %w", err)
}
@@ -93,20 +111,17 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
for _, reg := range cont.IgnorePkgsRegexp {
_, err := regexp.Compile(reg)
if err != nil {
return xerrors.Errorf("Failed to parse %s in %s@%s. err: %w",
reg, contName, name, err)
return xerrors.Errorf("Failed to parse %s in %s@%s. err: %w", reg, contName, name, err)
}
}
}
for ownerRepo, githubSetting := range server.GitHubRepos {
if ss := strings.Split(ownerRepo, "/"); len(ss) != 2 {
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s in %s",
ownerRepo, name)
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s in %s", ownerRepo, name)
}
if githubSetting.Token == "" {
return xerrors.Errorf("GitHub owner/repo: %s in %s token is empty",
ownerRepo, name)
return xerrors.Errorf("GitHub owner/repo: %s in %s token is empty", ownerRepo, name)
}
}
@@ -119,44 +134,112 @@ func (c TOMLLoader) Load(pathToToml, keyPass string) error {
case "base", "updates":
// nop
default:
return xerrors.Errorf(
"For now, enablerepo have to be base or updates: %s",
server.Enablerepo)
return xerrors.Errorf("For now, enablerepo have to be base or updates: %s", server.Enablerepo)
}
}
}
server.LogMsgAnsiColor = Colors[index%len(Colors)]
index++
if server.PortScan.ScannerBinPath != "" {
server.PortScan.IsUseExternalScanner = true
}
Conf.Servers[name] = server
if !isCIDRNotation(server.Host) {
server.ServerName = name
servers[server.ServerName] = server
continue
}
for _, host := range serverHosts {
server.Host = host
server.ServerName = fmt.Sprintf("%s(%s)", name, host)
server.LogMsgAnsiColor = Colors[index%len(Colors)]
index++
servers[server.ServerName] = server
}
}
Conf.Servers = servers
return nil
}
func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
if server.Type != ServerTypePseudo {
if len(server.Host) == 0 {
return xerrors.Errorf("server.host is empty")
}
func hosts(host string, ignores []string) ([]string, error) {
hostMap := map[string]struct{}{}
hosts, err := enumerateHosts(host)
if err != nil {
return nil, xerrors.Errorf("Failed to enumarate hosts. err: %w", err)
}
for _, host := range hosts {
hostMap[host] = struct{}{}
}
for _, ignore := range ignores {
hosts, err := enumerateHosts(ignore)
if err != nil {
return nil, xerrors.Errorf("Failed to enumarate hosts. err: %w", err)
}
if len(hosts) == 1 && net.ParseIP(hosts[0]) == nil {
return nil, xerrors.Errorf("Failed to ignore hosts. err: a non-IP address has been entered in ignoreIPAddress")
}
for _, host := range hosts {
delete(hostMap, host)
}
}
hosts = []string{}
for host := range hostMap {
hosts = append(hosts, host)
}
return hosts, nil
}
func enumerateHosts(host string) ([]string, error) {
if !isCIDRNotation(host) {
return []string{host}, nil
}
ipAddr, ipNet, err := net.ParseCIDR(host)
if err != nil {
return nil, xerrors.Errorf("Failed to parse CIDR. err: %w", err)
}
maskLen, _ := ipNet.Mask.Size()
addrs := []string{}
if net.ParseIP(ipAddr.String()).To4() != nil {
n := iplib.NewNet4(ipAddr, int(maskLen))
for _, addr := range n.Enumerate(int(n.Count()), 0) {
addrs = append(addrs, addr.String())
}
} else if net.ParseIP(ipAddr.String()).To16() != nil {
n := iplib.NewNet6(ipAddr, int(maskLen), 0)
if !n.Count().IsInt64() {
return nil, xerrors.Errorf("Failed to enumerate IP address. err: mask bitsize too big")
}
for _, addr := range n.Enumerate(int(n.Count().Int64()), 0) {
addrs = append(addrs, addr.String())
}
}
return addrs, nil
}
func isCIDRNotation(host string) bool {
ss := strings.Split(host, "/")
if len(ss) == 1 || net.ParseIP(ss[0]) == nil {
return false
}
return true
}
func setDefaultIfEmpty(server *ServerInfo) error {
if server.Type != constant.ServerTypePseudo {
if len(server.JumpServer) == 0 {
server.JumpServer = Conf.Default.JumpServer
}
if server.Port == "" {
if Conf.Default.Port != "" {
server.Port = Conf.Default.Port
} else {
server.Port = "22"
}
server.Port = Conf.Default.Port
}
if server.User == "" {
server.User = Conf.Default.User
if server.User == "" && server.Port != "local" {
return xerrors.Errorf("server.user is empty")
}
}
if server.SSHConfigPath == "" {
@@ -166,10 +249,6 @@ func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
if server.KeyPath == "" {
server.KeyPath = Conf.Default.KeyPath
}
if server.KeyPassword == "" {
server.KeyPassword = Conf.Default.KeyPassword
}
}
if len(server.Lockfiles) == 0 {
@@ -208,6 +287,20 @@ func setDefaultIfEmpty(server *ServerInfo, d ServerInfo) error {
}
}
if server.PortScan == nil {
server.PortScan = Conf.Default.PortScan
if server.PortScan == nil {
server.PortScan = &PortScanConf{}
}
}
if server.Windows == nil {
server.Windows = Conf.Default.Windows
if server.Windows == nil {
server.Windows = &WindowsConf{}
}
}
if len(server.IgnoredJSONKeys) == 0 {
server.IgnoredJSONKeys = Conf.Default.IgnoredJSONKeys
}


@@ -1,9 +1,102 @@
package config
import (
"reflect"
"sort"
"testing"
)
func TestHosts(t *testing.T) {
var tests = []struct {
in string
ignore []string
expected []string
err bool
}{
{
in: "127.0.0.1",
expected: []string{"127.0.0.1"},
err: false,
},
{
in: "127.0.0.1",
ignore: []string{"127.0.0.1"},
expected: []string{},
err: false,
},
{
in: "ssh/host",
expected: []string{"ssh/host"},
err: false,
},
{
in: "192.168.1.1/30",
expected: []string{"192.168.1.1", "192.168.1.2"},
err: false,
},
{
in: "192.168.1.1/30",
ignore: []string{"192.168.1.1"},
expected: []string{"192.168.1.2"},
err: false,
},
{
in: "192.168.1.1/30",
ignore: []string{"ignore"},
err: true,
},
{
in: "192.168.1.1/30",
ignore: []string{"192.168.1.1/30"},
expected: []string{},
err: false,
},
{
in: "192.168.1.1/31",
expected: []string{"192.168.1.0", "192.168.1.1"},
err: false,
},
{
in: "192.168.1.1/32",
expected: []string{"192.168.1.1"},
err: false,
},
{
in: "2001:4860:4860::8888/126",
expected: []string{"2001:4860:4860::8888", "2001:4860:4860::8889", "2001:4860:4860::888a", "2001:4860:4860::888b"},
err: false,
},
{
in: "2001:4860:4860::8888/127",
expected: []string{"2001:4860:4860::8888", "2001:4860:4860::8889"},
err: false,
},
{
in: "2001:4860:4860::8888/128",
expected: []string{"2001:4860:4860::8888"},
err: false,
},
{
in: "2001:4860:4860::8888/32",
err: true,
},
}
for i, tt := range tests {
actual, err := hosts(tt.in, tt.ignore)
sort.Slice(actual, func(i, j int) bool { return actual[i] < actual[j] })
if err != nil && !tt.err {
t.Errorf("[%d] unexpected error occurred, in: %s act: %s, exp: %s",
i, tt.in, actual, tt.expected)
} else if err == nil && tt.err {
t.Errorf("[%d] expected error is not occurred, in: %s act: %s, exp: %s",
i, tt.in, actual, tt.expected)
}
if !reflect.DeepEqual(actual, tt.expected) {
t.Errorf("[%d] in: %s, actual: %q, expected: %q", i, tt.in, actual, tt.expected)
}
}
}
func TestToCpeURI(t *testing.T) {
var tests = []struct {
in string

config/vulnDictConf.go

@@ -0,0 +1,330 @@
package config
import (
"fmt"
"os"
"path/filepath"
"time"
"github.com/asaskevich/govalidator"
"github.com/future-architect/vuls/logging"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
// VulnDictInterface is an interface of vulnsrc
type VulnDictInterface interface {
Init()
Validate() error
IsFetchViaHTTP() bool
CheckHTTPHealth() error
GetName() string
GetType() string
GetURL() string
GetSQLite3Path() string
GetDebugSQL() bool
}
// VulnDict is a base struct of vuln dicts
type VulnDict struct {
Name string
// DB type of CVE dictionary (sqlite3, mysql, postgres or redis)
Type string
// http://cve-dictionary.com:1323 or DB connection string
URL string `json:"-"`
// /path/to/cve.sqlite3
SQLite3Path string
DebugSQL bool
}
// GetType returns type
func (cnf VulnDict) GetType() string {
return cnf.Type
}
// GetName returns name
func (cnf VulnDict) GetName() string {
return cnf.Name
}
// GetURL returns url
func (cnf VulnDict) GetURL() string {
return cnf.URL
}
// GetSQLite3Path return the path of SQLite3
func (cnf VulnDict) GetSQLite3Path() string {
return cnf.SQLite3Path
}
// GetDebugSQL return debugSQL flag
func (cnf VulnDict) GetDebugSQL() bool {
return cnf.DebugSQL
}
// Validate settings
func (cnf VulnDict) Validate() error {
logging.Log.Infof("%s.type=%s, %s.url=%s, %s.SQLite3Path=%s",
cnf.Name, cnf.Type, cnf.Name, cnf.URL, cnf.Name, cnf.SQLite3Path)
switch cnf.Type {
case "sqlite3":
if cnf.URL != "" {
return xerrors.Errorf("To use SQLite3, specify %s.type=sqlite3 and %s.SQLite3Path. To use as HTTP server mode, specify %s.type=http and %s.url",
cnf.Name, cnf.Name, cnf.Name, cnf.Name)
}
if ok, _ := govalidator.IsFilePath(cnf.SQLite3Path); !ok {
return xerrors.Errorf("SQLite3 path must be a *Absolute* file path. %s.SQLite3Path: %s",
cnf.Name, cnf.SQLite3Path)
}
if _, err := os.Stat(cnf.SQLite3Path); os.IsNotExist(err) {
logging.Log.Warnf("%s.SQLite3Path=%s file not found", cnf.Name, cnf.SQLite3Path)
}
case "mysql":
if cnf.URL == "" {
return xerrors.Errorf(`MySQL connection string is needed. %s.url="user:pass@tcp(localhost:3306)/dbname"`, cnf.Name)
}
case "postgres":
if cnf.URL == "" {
return xerrors.Errorf(`PostgreSQL connection string is needed. %s.url="host=myhost user=user dbname=dbname sslmode=disable password=password"`, cnf.Name)
}
case "redis":
if cnf.URL == "" {
return xerrors.Errorf(`Redis connection string is needed. %s.url="redis://localhost/0"`, cnf.Name)
}
case "http":
if cnf.URL == "" {
return xerrors.Errorf(`URL is needed. -%s-url="http://localhost:1323"`, cnf.Name)
}
default:
return xerrors.Errorf("%s.type must be either 'sqlite3', 'mysql', 'postgres', 'redis' or 'http'. %s.type: %s", cnf.Name, cnf.Name, cnf.Type)
}
return nil
}
// Init the struct
func (cnf VulnDict) Init() {}
func (cnf *VulnDict) setDefault(sqlite3Name string) {
if cnf.Type == "" {
cnf.Type = "sqlite3"
}
if cnf.URL == "" && cnf.SQLite3Path == "" {
wd, _ := os.Getwd()
cnf.SQLite3Path = filepath.Join(wd, sqlite3Name)
}
}
// IsFetchViaHTTP returns if fetch via HTTP
func (cnf VulnDict) IsFetchViaHTTP() bool {
return cnf.Type == "http"
}
// CheckHTTPHealth checks http server status
func (cnf VulnDict) CheckHTTPHealth() error {
if !cnf.IsFetchViaHTTP() {
return nil
}
url := fmt.Sprintf("%s/health", cnf.URL)
resp, _, errs := gorequest.New().Timeout(10 * time.Second).SetDebug(Conf.Debug).Get(url).End()
// resp, _, errs = gorequest.New().Proxy(api.httpProxy).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("Failed to request to CVE server. url: %s, errs: %s",
url, errs)
}
return nil
}
// GovalDictConf is goval-dictionary config
type GovalDictConf struct {
VulnDict
}
const govalType = "OVALDB_TYPE"
const govalURL = "OVALDB_URL"
const govalPATH = "OVALDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GovalDictConf) Init() {
cnf.Name = "ovalDict"
if os.Getenv(govalType) != "" {
cnf.Type = os.Getenv(govalType)
}
if os.Getenv(govalURL) != "" {
cnf.URL = os.Getenv(govalURL)
}
if os.Getenv(govalPATH) != "" {
cnf.SQLite3Path = os.Getenv(govalPATH)
}
cnf.setDefault("oval.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// ExploitConf is exploit config
type ExploitConf struct {
VulnDict
}
const exploitDBType = "EXPLOITDB_TYPE"
const exploitDBURL = "EXPLOITDB_URL"
const exploitDBPATH = "EXPLOITDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *ExploitConf) Init() {
cnf.Name = "exploit"
if os.Getenv(exploitDBType) != "" {
cnf.Type = os.Getenv(exploitDBType)
}
if os.Getenv(exploitDBURL) != "" {
cnf.URL = os.Getenv(exploitDBURL)
}
if os.Getenv(exploitDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(exploitDBPATH)
}
cnf.setDefault("go-exploitdb.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// GoCveDictConf is GoCveDict config
type GoCveDictConf struct {
VulnDict
}
const cveDBType = "CVEDB_TYPE"
const cveDBURL = "CVEDB_URL"
const cveDBPATH = "CVEDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GoCveDictConf) Init() {
cnf.Name = "cveDict"
if os.Getenv(cveDBType) != "" {
cnf.Type = os.Getenv(cveDBType)
}
if os.Getenv(cveDBURL) != "" {
cnf.URL = os.Getenv(cveDBURL)
}
if os.Getenv(cveDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(cveDBPATH)
}
cnf.setDefault("cve.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// GostConf is gost config
type GostConf struct {
VulnDict
}
const gostDBType = "GOSTDB_TYPE"
const gostDBURL = "GOSTDB_URL"
const gostDBPATH = "GOSTDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *GostConf) Init() {
cnf.Name = "gost"
if os.Getenv(gostDBType) != "" {
cnf.Type = os.Getenv(gostDBType)
}
if os.Getenv(gostDBURL) != "" {
cnf.URL = os.Getenv(gostDBURL)
}
if os.Getenv(gostDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(gostDBPATH)
}
cnf.setDefault("gost.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// MetasploitConf is go-msfdb config
type MetasploitConf struct {
VulnDict
}
const metasploitDBType = "METASPLOITDB_TYPE"
const metasploitDBURL = "METASPLOITDB_URL"
const metasploitDBPATH = "METASPLOITDB_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *MetasploitConf) Init() {
cnf.Name = "metasploit"
if os.Getenv(metasploitDBType) != "" {
cnf.Type = os.Getenv(metasploitDBType)
}
if os.Getenv(metasploitDBURL) != "" {
cnf.URL = os.Getenv(metasploitDBURL)
}
if os.Getenv(metasploitDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(metasploitDBPATH)
}
cnf.setDefault("go-msfdb.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// KEVulnConf is go-kev config
type KEVulnConf struct {
VulnDict
}
const kevulnDBType = "KEVULN_TYPE"
const kevulnDBURL = "KEVULN_URL"
const kevulnDBPATH = "KEVULN_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *KEVulnConf) Init() {
cnf.Name = "kevuln"
if os.Getenv(kevulnDBType) != "" {
cnf.Type = os.Getenv(kevulnDBType)
}
if os.Getenv(kevulnDBURL) != "" {
cnf.URL = os.Getenv(kevulnDBURL)
}
if os.Getenv(kevulnDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(kevulnDBPATH)
}
cnf.setDefault("go-kev.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
// CtiConf is go-cti config
type CtiConf struct {
VulnDict
}
const ctiDBType = "CTI_TYPE"
const ctiDBURL = "CTI_URL"
const ctiDBPATH = "CTI_SQLITE3_PATH"
// Init set options with the following priority.
// 1. Environment variable
// 2. config.toml
func (cnf *CtiConf) Init() {
cnf.Name = "cti"
if os.Getenv(ctiDBType) != "" {
cnf.Type = os.Getenv(ctiDBType)
}
if os.Getenv(ctiDBURL) != "" {
cnf.URL = os.Getenv(ctiDBURL)
}
if os.Getenv(ctiDBPATH) != "" {
cnf.SQLite3Path = os.Getenv(ctiDBPATH)
}
cnf.setDefault("go-cti.sqlite3")
cnf.DebugSQL = Conf.DebugSQL
}
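The *Conf Init methods above all follow the same pattern: an environment variable, when set, overrides the value from config.toml, and setDefault falls back to a SQLite3 file in the working directory. A minimal sketch of that precedence, not part of this change (the URL is a hypothetical local go-cve-dictionary endpoint):

```go
package main

import (
	"fmt"
	"os"

	"github.com/future-architect/vuls/config"
)

func main() {
	// The CVEDB_* variables take priority over whatever config.toml supplied.
	os.Setenv("CVEDB_TYPE", "http")
	os.Setenv("CVEDB_URL", "http://localhost:1323") // hypothetical endpoint

	cnf := config.GoCveDictConf{}
	cnf.Init()
	fmt.Println(cnf.GetType(), cnf.GetURL(), cnf.IsFetchViaHTTP()) // http http://localhost:1323 true
}
```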

config/windows.go

@@ -0,0 +1,27 @@
package config
import (
"os"
"golang.org/x/xerrors"
)
// WindowsConf used for Windows Update Setting
type WindowsConf struct {
ServerSelection int `toml:"serverSelection,omitempty" json:"serverSelection,omitempty"`
CabPath string `toml:"cabPath,omitempty" json:"cabPath,omitempty"`
}
// Validate validates configuration
func (c *WindowsConf) Validate() []error {
switch c.ServerSelection {
case 0, 1, 2:
case 3:
if _, err := os.Stat(c.CabPath); err != nil {
return []error{xerrors.Errorf("%s does not exist. err: %w", c.CabPath, err)}
}
default:
return []error{xerrors.Errorf("ServerSelection: %d does not support . Reference: https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-uamg/07e2bfa4-6795-4189-b007-cc50b476181a", c.ServerSelection)}
}
return nil
}
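WindowsConf.Validate accepts serverSelection 0-2 as-is and, for 3, additionally requires that cabPath points at an existing file. A minimal sketch, not part of this change (the cab path below is a hypothetical example):

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
)

func main() {
	c := config.WindowsConf{ServerSelection: 3, CabPath: "/tmp/wsusscn2.cab"} // hypothetical offline scan file
	// Validate returns a slice of errors; it is non-empty here if the cab file does not exist.
	for _, err := range c.Validate() {
		fmt.Println(err)
	}
}
```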

constant/constant.go

@@ -0,0 +1,64 @@
package constant
// Global constant
// Pkg local constants should not be defined here.
// Define them in the each package.
const (
// RedHat is
RedHat = "redhat"
// Debian is
Debian = "debian"
// Ubuntu is
Ubuntu = "ubuntu"
// CentOS is
CentOS = "centos"
// Alma is
Alma = "alma"
// Rocky is
Rocky = "rocky"
// Fedora is
Fedora = "fedora"
// Amazon is
Amazon = "amazon"
// Oracle is
Oracle = "oracle"
// FreeBSD is
FreeBSD = "freebsd"
// Raspbian is
Raspbian = "raspbian"
// Windows is
Windows = "windows"
// OpenSUSE is
OpenSUSE = "opensuse"
// OpenSUSELeap is
OpenSUSELeap = "opensuse.leap"
// SUSEEnterpriseServer is
SUSEEnterpriseServer = "suse.linux.enterprise.server"
// SUSEEnterpriseDesktop is
SUSEEnterpriseDesktop = "suse.linux.enterprise.desktop"
// Alpine is
Alpine = "alpine"
// ServerTypePseudo is used for ServerInfo.Type, r.Family
ServerTypePseudo = "pseudo"
// DeepSecurity is
DeepSecurity = "deepsecurity"
)

contrib/Dockerfile

@@ -0,0 +1,34 @@
FROM golang:alpine as builder
RUN apk add --no-cache \
git \
make \
gcc \
musl-dev
ENV REPOSITORY github.com/future-architect/vuls
COPY . $GOPATH/src/$REPOSITORY
RUN cd $GOPATH/src/$REPOSITORY && \
make build-scanner && mv vuls $GOPATH/bin && \
make build-trivy-to-vuls && mv trivy-to-vuls $GOPATH/bin && \
make build-future-vuls && mv future-vuls $GOPATH/bin && \
make build-snmp2cpe && mv snmp2cpe $GOPATH/bin
FROM alpine:3.15
ENV LOGDIR /var/log/vuls
ENV WORKDIR /vuls
RUN apk add --no-cache \
openssh-client \
ca-certificates \
git \
nmap \
&& mkdir -p $WORKDIR $LOGDIR
COPY --from=builder /go/bin/vuls /go/bin/trivy-to-vuls /go/bin/future-vuls /go/bin/snmp2cpe /usr/local/bin/
COPY --from=aquasec/trivy:latest /usr/local/bin/trivy /usr/local/bin/trivy
VOLUME ["$WORKDIR", "$LOGDIR"]
WORKDIR $WORKDIR
ENV PWD $WORKDIR


@@ -7,6 +7,7 @@ import (
"fmt"
"os"
"strconv"
"strings"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/models"
@@ -21,6 +22,7 @@ var (
serverUUID string
groupID int64
token string
tags []string
url string
)
@@ -47,6 +49,9 @@ func main() {
if len(token) == 0 {
token = os.Getenv("VULS_TOKEN")
}
if len(tags) == 0 {
tags = strings.Split(os.Getenv("VULS_TAGS"), ",")
}
var scanResultJSON []byte
if stdIn {
@@ -69,6 +74,12 @@ func main() {
return
}
scanResult.ServerUUID = serverUUID
if 0 < len(tags) {
if scanResult.Optional == nil {
scanResult.Optional = map[string]interface{}{}
}
scanResult.Optional["VULS_TAGS"] = tags
}
config.Conf.Saas.GroupID = groupID
config.Conf.Saas.Token = token
@@ -81,6 +92,14 @@ func main() {
return
},
}
var cmdVersion = &cobra.Command{
Use: "version",
Short: "Show version",
Long: "Show version",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf("future-vuls-%s-%s\n", config.Version, config.Revision)
},
}
cmdFvulsUploader.PersistentFlags().StringVar(&serverUUID, "uuid", "", "server uuid. ENV: VULS_SERVER_UUID")
cmdFvulsUploader.PersistentFlags().StringVar(&configFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
cmdFvulsUploader.PersistentFlags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin. ENV: VULS_STDIN")
@@ -92,6 +111,7 @@ func main() {
var rootCmd = &cobra.Command{Use: "future-vuls"}
rootCmd.AddCommand(cmdFvulsUploader)
rootCmd.AddCommand(cmdVersion)
if err = rootCmd.Execute(); err != nil {
fmt.Println("Failed to execute command", err)
}


@@ -2,7 +2,7 @@ package parser
import (
"encoding/xml"
"io/ioutil"
"io"
"os"
"strings"
@@ -41,7 +41,7 @@ func Parse(path string) ([]string, error) {
}
defer file.Close()
b, err := ioutil.ReadAll(file)
b, err := io.ReadAll(file)
if err != nil {
log.Warnf("Failed to read OWASP Dependency Check XML: %s", path)
return []string{}, nil


@@ -0,0 +1,50 @@
# snmp2cpe
## Main Features
- Estimate hardware and OS CPE from SNMP reply of network devices
## Installation
```console
$ git clone https://github.com/future-architect/vuls.git
$ make build-snmp2cpe
```
## Command Reference
```console
$ snmp2cpe help
snmp2cpe: SNMP reply To CPE
Usage:
snmp2cpe [command]
Available Commands:
completion Generate the autocompletion script for the specified shell
convert snmpget reply to CPE
help Help about any command
v1 snmpget with SNMPv1
v2c snmpget with SNMPv2c
v3 snmpget with SNMPv3
version Print the version
Flags:
-h, --help help for snmp2cpe
Use "snmp2cpe [command] --help" for more information about a command.
```
## Usage
```console
$ snmp2cpe v2c --debug 192.168.1.99 public
2023/03/28 14:16:54 DEBUG: .1.3.6.1.2.1.1.1.0 ->
2023/03/28 14:16:54 DEBUG: .1.3.6.1.2.1.47.1.1.1.1.12.1 -> Fortinet
2023/03/28 14:16:54 DEBUG: .1.3.6.1.2.1.47.1.1.1.1.7.1 -> FGT_50E
2023/03/28 14:16:54 DEBUG: .1.3.6.1.2.1.47.1.1.1.1.10.1 -> FortiGate-50E v5.4.6,build1165b1165,171018 (GA)
{"192.168.1.99":{"entPhysicalTables":{"1":{"entPhysicalMfgName":"Fortinet","entPhysicalName":"FGT_50E","entPhysicalSoftwareRev":"FortiGate-50E v5.4.6,build1165b1165,171018 (GA)"}}}}
$ snmp2cpe v2c 192.168.1.99 public | snmp2cpe convert
{"192.168.1.99":["cpe:2.3:h:fortinet:fortigate-50e:-:*:*:*:*:*:*:*","cpe:2.3:o:fortinet:fortios:5.4.6:*:*:*:*:*:*:*"]}
```
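The same conversion is reachable from Go via the cpe and snmp packages added in this compare. A minimal sketch, not part of this change, reusing the Fortinet reply from the console example above:

```go
package main

import (
	"fmt"

	"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cpe"
	"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)

func main() {
	r := snmp.Result{
		EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
			EntPhysicalMfgName:     "Fortinet",
			EntPhysicalName:        "FGT_50E",
			EntPhysicalSoftwareRev: "FortiGate-50E v5.4.6,build1165b1165,171018 (GA)",
		}},
	}
	fmt.Println(cpe.Convert(r))
	// [cpe:2.3:h:fortinet:fortigate-50e:-:*:*:*:*:*:*:* cpe:2.3:o:fortinet:fortios:5.4.6:*:*:*:*:*:*:*]
}
```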


@@ -0,0 +1,15 @@
package main
import (
"fmt"
"os"
rootCmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/root"
)
func main() {
if err := rootCmd.NewCmdRoot().Execute(); err != nil {
fmt.Fprintf(os.Stderr, "failed to exec snmp2cpe: %s\n", fmt.Sprintf("%+v", err))
os.Exit(1)
}
}


@@ -0,0 +1,52 @@
package convert
import (
"encoding/json"
"os"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cpe"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)
// NewCmdConvert ...
func NewCmdConvert() *cobra.Command {
cmd := &cobra.Command{
Use: "convert",
Short: "snmpget reply to CPE",
Args: cobra.MaximumNArgs(1),
Example: `$ snmp2cpe v2c 192.168.11.11 public | snmp2cpe convert
$ snmp2cpe v2c 192.168.11.11 public | snmp2cpe convert -
$ snmp2cpe v2c 192.168.11.11 public > v2c.json && snmp2cpe convert v2c.json`,
RunE: func(_ *cobra.Command, args []string) error {
r := os.Stdin
if len(args) == 1 && args[0] != "-" {
f, err := os.Open(args[0])
if err != nil {
return errors.Wrapf(err, "failed to open %s", args[0])
}
defer f.Close()
r = f
}
var reply map[string]snmp.Result
if err := json.NewDecoder(r).Decode(&reply); err != nil {
return errors.Wrap(err, "failed to decode")
}
converted := map[string][]string{}
for ipaddr, res := range reply {
converted[ipaddr] = cpe.Convert(res)
}
if err := json.NewEncoder(os.Stdout).Encode(converted); err != nil {
return errors.Wrap(err, "failed to encode")
}
return nil
},
}
return cmd
}


@@ -0,0 +1,30 @@
package root
import (
"github.com/spf13/cobra"
convertCmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/convert"
v1Cmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/v1"
v2cCmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/v2c"
v3Cmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/v3"
versionCmd "github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cmd/version"
)
// NewCmdRoot ...
func NewCmdRoot() *cobra.Command {
cmd := &cobra.Command{
Use: "snmp2cpe <command>",
Short: "snmp2cpe",
Long: "snmp2cpe: SNMP reply To CPE",
SilenceErrors: true,
SilenceUsage: true,
}
cmd.AddCommand(v1Cmd.NewCmdV1())
cmd.AddCommand(v2cCmd.NewCmdV2c())
cmd.AddCommand(v3Cmd.NewCmdV3())
cmd.AddCommand(convertCmd.NewCmdConvert())
cmd.AddCommand(versionCmd.NewCmdVersion())
return cmd
}


@@ -0,0 +1,47 @@
package v1
import (
"encoding/json"
"os"
"github.com/gosnmp/gosnmp"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)
// SNMPv1Options ...
type SNMPv1Options struct {
Debug bool
}
// NewCmdV1 ...
func NewCmdV1() *cobra.Command {
opts := &SNMPv1Options{
Debug: false,
}
cmd := &cobra.Command{
Use: "v1 <IP Address> <Community>",
Short: "snmpget with SNMPv1",
Example: "$ snmp2cpe v1 192.168.100.1 public",
Args: cobra.ExactArgs(2),
RunE: func(_ *cobra.Command, args []string) error {
r, err := snmp.Get(gosnmp.Version1, args[0], snmp.WithCommunity(args[1]), snmp.WithDebug(opts.Debug))
if err != nil {
return errors.Wrap(err, "failed to snmpget")
}
if err := json.NewEncoder(os.Stdout).Encode(map[string]snmp.Result{args[0]: r}); err != nil {
return errors.Wrap(err, "failed to encode")
}
return nil
},
}
cmd.Flags().BoolVarP(&opts.Debug, "debug", "", false, "debug mode")
return cmd
}


@@ -0,0 +1,47 @@
package v2c
import (
"encoding/json"
"os"
"github.com/gosnmp/gosnmp"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)
// SNMPv2cOptions ...
type SNMPv2cOptions struct {
Debug bool
}
// NewCmdV2c ...
func NewCmdV2c() *cobra.Command {
opts := &SNMPv2cOptions{
Debug: false,
}
cmd := &cobra.Command{
Use: "v2c <IP Address> <Community>",
Short: "snmpget with SNMPv2c",
Example: "$ snmp2cpe v2c 192.168.100.1 public",
Args: cobra.ExactArgs(2),
RunE: func(_ *cobra.Command, args []string) error {
r, err := snmp.Get(gosnmp.Version2c, args[0], snmp.WithCommunity(args[1]), snmp.WithDebug(opts.Debug))
if err != nil {
return errors.Wrap(err, "failed to snmpget")
}
if err := json.NewEncoder(os.Stdout).Encode(map[string]snmp.Result{args[0]: r}); err != nil {
return errors.Wrap(err, "failed to encode")
}
return nil
},
}
cmd.Flags().BoolVarP(&opts.Debug, "debug", "", false, "debug mode")
return cmd
}


@@ -0,0 +1,39 @@
package v3
import (
"github.com/gosnmp/gosnmp"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)
// SNMPv3Options ...
type SNMPv3Options struct {
Debug bool
}
// NewCmdV3 ...
func NewCmdV3() *cobra.Command {
opts := &SNMPv3Options{
Debug: false,
}
cmd := &cobra.Command{
Use: "v3 <args>",
Short: "snmpget with SNMPv3",
Example: "$ snmp2cpe v3",
RunE: func(_ *cobra.Command, _ []string) error {
_, err := snmp.Get(gosnmp.Version3, "", snmp.WithDebug(opts.Debug))
if err != nil {
return errors.Wrap(err, "failed to snmpget")
}
return nil
},
}
cmd.Flags().BoolVarP(&opts.Debug, "debug", "", false, "debug mode")
return cmd
}


@@ -0,0 +1,23 @@
package version
import (
"fmt"
"os"
"github.com/spf13/cobra"
"github.com/future-architect/vuls/config"
)
// NewCmdVersion ...
func NewCmdVersion() *cobra.Command {
cmd := &cobra.Command{
Use: "version",
Short: "Print the version",
Args: cobra.NoArgs,
Run: func(_ *cobra.Command, _ []string) {
fmt.Fprintf(os.Stdout, "snmp2cpe %s %s\n", config.Version, config.Revision)
},
}
return cmd
}


@@ -0,0 +1,212 @@
package cpe
import (
"fmt"
"strings"
"github.com/hashicorp/go-version"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/util"
)
// Convert ...
func Convert(result snmp.Result) []string {
var cpes []string
switch detectVendor(result) {
case "Cisco":
var p, v string
lhs, _, _ := strings.Cut(result.SysDescr0, " RELEASE SOFTWARE")
for _, s := range strings.Split(lhs, ",") {
s = strings.TrimSpace(s)
switch {
case strings.Contains(s, "Cisco NX-OS"):
p = "nx-os"
case strings.Contains(s, "Cisco IOS Software"), strings.Contains(s, "Cisco Internetwork Operating System Software IOS"):
p = "ios"
if strings.Contains(lhs, "IOSXE") || strings.Contains(lhs, "IOS-XE") {
p = "ios_xe"
}
case strings.HasPrefix(s, "Version "):
v = strings.ToLower(strings.TrimPrefix(s, "Version "))
}
}
if p != "" && v != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:cisco:%s:%s:*:*:*:*:*:*:*", p, v))
}
if t, ok := result.EntPhysicalTables[1]; ok {
if t.EntPhysicalName != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:cisco:%s:-:*:*:*:*:*:*:*", strings.ToLower(t.EntPhysicalName)))
}
if p != "" && t.EntPhysicalSoftwareRev != "" {
s, _, _ := strings.Cut(t.EntPhysicalSoftwareRev, " RELEASE SOFTWARE")
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:cisco:%s:%s:*:*:*:*:*:*:*", p, strings.ToLower(strings.TrimSuffix(s, ","))))
}
}
case "Juniper Networks":
if strings.HasPrefix(result.SysDescr0, "Juniper Networks, Inc.") {
for _, s := range strings.Split(strings.TrimPrefix(result.SysDescr0, "Juniper Networks, Inc. "), ",") {
s = strings.TrimSpace(s)
switch {
case strings.HasPrefix(s, "qfx"), strings.HasPrefix(s, "ex"), strings.HasPrefix(s, "mx"), strings.HasPrefix(s, "ptx"), strings.HasPrefix(s, "acx"), strings.HasPrefix(s, "bti"), strings.HasPrefix(s, "srx"):
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:juniper:%s:-:*:*:*:*:*:*:*", strings.Fields(s)[0]))
case strings.HasPrefix(s, "kernel JUNOS "):
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:juniper:junos:%s:*:*:*:*:*:*:*", strings.ToLower(strings.Fields(strings.TrimPrefix(s, "kernel JUNOS "))[0])))
}
}
if t, ok := result.EntPhysicalTables[1]; ok {
if t.EntPhysicalSoftwareRev != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:juniper:junos:%s:*:*:*:*:*:*:*", strings.ToLower(t.EntPhysicalSoftwareRev)))
}
}
} else {
h, v, ok := strings.Cut(result.SysDescr0, " version ")
if ok {
cpes = append(cpes,
fmt.Sprintf("cpe:2.3:h:juniper:%s:-:*:*:*:*:*:*:*", strings.ToLower(h)),
fmt.Sprintf("cpe:2.3:o:juniper:screenos:%s:*:*:*:*:*:*:*", strings.ToLower(strings.Fields(v)[0])),
)
}
}
case "Arista Networks":
v, h, ok := strings.Cut(result.SysDescr0, " running on an ")
if ok {
if strings.HasPrefix(v, "Arista Networks EOS version ") {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:arista:eos:%s:*:*:*:*:*:*:*", strings.ToLower(strings.TrimPrefix(v, "Arista Networks EOS version "))))
}
cpes = append(cpes, fmt.Sprintf("cpe:/h:arista:%s:-:*:*:*:*:*:*:*", strings.ToLower(strings.TrimPrefix(h, "Arista Networks "))))
}
if t, ok := result.EntPhysicalTables[1]; ok {
if t.EntPhysicalSoftwareRev != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:arista:eos:%s:*:*:*:*:*:*:*", strings.ToLower(t.EntPhysicalSoftwareRev)))
}
}
case "Fortinet":
if t, ok := result.EntPhysicalTables[1]; ok {
if strings.HasPrefix(t.EntPhysicalName, "FGT_") {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:fortinet:fortigate-%s:-:*:*:*:*:*:*:*", strings.ToLower(strings.TrimPrefix(t.EntPhysicalName, "FGT_"))))
}
for _, s := range strings.Fields(t.EntPhysicalSoftwareRev) {
switch {
case strings.HasPrefix(s, "FortiGate-"):
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:fortinet:%s:-:*:*:*:*:*:*:*", strings.ToLower(s)))
case strings.HasPrefix(s, "v") && strings.Contains(s, "build"):
if v, _, found := strings.Cut(strings.TrimPrefix(s, "v"), ",build"); found {
if _, err := version.NewVersion(v); err == nil {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:fortinet:fortios:%s:*:*:*:*:*:*:*", v))
}
}
}
}
}
case "YAMAHA":
var h, v string
for _, s := range strings.Fields(result.SysDescr0) {
switch {
case strings.HasPrefix(s, "RTX"), strings.HasPrefix(s, "NVR"), strings.HasPrefix(s, "RTV"), strings.HasPrefix(s, "RT"),
strings.HasPrefix(s, "SRT"), strings.HasPrefix(s, "FWX"), strings.HasPrefix(s, "YSL-V810"):
h = strings.ToLower(s)
case strings.HasPrefix(s, "Rev."):
if _, err := version.NewVersion(strings.TrimPrefix(s, "Rev.")); err == nil {
v = strings.TrimPrefix(s, "Rev.")
}
}
}
if h != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:yamaha:%s:-:*:*:*:*:*:*:*", h))
if v != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:yamaha:%s:%s:*:*:*:*:*:*:*", h, v))
}
}
case "NEC":
var h, v string
for _, s := range strings.Split(result.SysDescr0, ",") {
s = strings.TrimSpace(s)
switch {
case strings.HasPrefix(s, "IX Series "):
h = strings.ToLower(strings.TrimSuffix(strings.TrimPrefix(s, "IX Series "), " (magellan-sec) Software"))
case strings.HasPrefix(s, "Version "):
if _, err := version.NewVersion(strings.TrimSpace(strings.TrimPrefix(s, "Version "))); err == nil {
v = strings.TrimSpace(strings.TrimPrefix(s, "Version "))
}
}
}
if h != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:nec:%s:-:*:*:*:*:*:*:*", h))
if v != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:nec:%s:%s:*:*:*:*:*:*:*", h, v))
}
}
case "Palo Alto Networks":
if t, ok := result.EntPhysicalTables[1]; ok {
if t.EntPhysicalName != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:h:paloaltonetworks:%s:-:*:*:*:*:*:*:*", strings.ToLower(t.EntPhysicalName)))
}
if t.EntPhysicalSoftwareRev != "" {
cpes = append(cpes, fmt.Sprintf("cpe:2.3:o:paloaltonetworks:pan-os:%s:*:*:*:*:*:*:*", t.EntPhysicalSoftwareRev))
}
}
default:
return []string{}
}
return util.Unique(cpes)
}
func detectVendor(r snmp.Result) string {
if t, ok := r.EntPhysicalTables[1]; ok {
switch t.EntPhysicalMfgName {
case "Cisco":
return "Cisco"
case "Juniper Networks":
return "Juniper Networks"
case "Arista Networks":
return "Arista Networks"
case "Fortinet":
return "Fortinet"
case "YAMAHA":
return "YAMAHA"
case "NEC":
return "NEC"
case "Palo Alto Networks":
return "Palo Alto Networks"
}
}
switch {
case strings.Contains(r.SysDescr0, "Cisco"):
return "Cisco"
case strings.Contains(r.SysDescr0, "Juniper Networks"),
strings.Contains(r.SysDescr0, "SSG5"), strings.Contains(r.SysDescr0, "SSG20"), strings.Contains(r.SysDescr0, "SSG140"),
strings.Contains(r.SysDescr0, "SSG320"), strings.Contains(r.SysDescr0, "SSG350"), strings.Contains(r.SysDescr0, "SSG520"),
strings.Contains(r.SysDescr0, "SSG550"):
return "Juniper Networks"
case strings.Contains(r.SysDescr0, "Arista Networks"):
return "Arista Networks"
case strings.Contains(r.SysDescr0, "Fortinet"), strings.Contains(r.SysDescr0, "FortiGate"):
return "Fortinet"
case strings.Contains(r.SysDescr0, "YAMAHA"),
strings.Contains(r.SysDescr0, "RTX810"), strings.Contains(r.SysDescr0, "RTX830"),
strings.Contains(r.SysDescr0, "RTX1000"), strings.Contains(r.SysDescr0, "RTX1100"),
strings.Contains(r.SysDescr0, "RTX1200"), strings.Contains(r.SysDescr0, "RTX1210"), strings.Contains(r.SysDescr0, "RTX1220"),
strings.Contains(r.SysDescr0, "RTX1300"), strings.Contains(r.SysDescr0, "RTX1500"), strings.Contains(r.SysDescr0, "RTX2000"),
strings.Contains(r.SysDescr0, "RTX3000"), strings.Contains(r.SysDescr0, "RTX3500"), strings.Contains(r.SysDescr0, "RTX5000"),
strings.Contains(r.SysDescr0, "NVR500"), strings.Contains(r.SysDescr0, "NVR510"), strings.Contains(r.SysDescr0, "NVR700W"),
strings.Contains(r.SysDescr0, "RTV01"), strings.Contains(r.SysDescr0, "RTV700"),
strings.Contains(r.SysDescr0, "RT105i"), strings.Contains(r.SysDescr0, "RT105p"), strings.Contains(r.SysDescr0, "RT105e"),
strings.Contains(r.SysDescr0, "RT107e"), strings.Contains(r.SysDescr0, "RT250i"), strings.Contains(r.SysDescr0, "RT300i"),
strings.Contains(r.SysDescr0, "SRT100"),
strings.Contains(r.SysDescr0, "FWX100"),
strings.Contains(r.SysDescr0, "YSL-V810"):
return "YAMAHA"
case strings.Contains(r.SysDescr0, "NEC"):
return "NEC"
case strings.Contains(r.SysDescr0, "Palo Alto Networks"):
return "Palo Alto Networks"
default:
return ""
}
}


@@ -0,0 +1,244 @@
package cpe_test
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/cpe"
"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)
func TestConvert(t *testing.T) {
tests := []struct {
name string
args snmp.Result
want []string
}{
{
name: "Cisco NX-OS Version 7.1(4)N1(1)",
args: snmp.Result{
SysDescr0: "Cisco NX-OS(tm) n6000, Software (n6000-uk9), Version 7.1(4)N1(1), RELEASE SOFTWARE Copyright (c) 2002-2012 by Cisco Systems, Inc. Device Manager Version 6.0(2)N1(1),Compiled 9/2/2016 10:00:00",
},
want: []string{"cpe:2.3:o:cisco:nx-os:7.1(4)n1(1):*:*:*:*:*:*:*"},
},
{
name: "Cisco IOS Version 15.1(4)M3",
args: snmp.Result{
SysDescr0: `Cisco IOS Software, 2800 Software (C2800NM-ADVENTERPRISEK9-M), Version 15.1(4)M3, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2011 by Cisco Systems, Inc.
Compiled Tue 06-Dec-11 16:21 by prod_rel_team`,
},
want: []string{"cpe:2.3:o:cisco:ios:15.1(4)m3:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOS Version 15.1(4)M4",
args: snmp.Result{
SysDescr0: `Cisco IOS Software, C181X Software (C181X-ADVENTERPRISEK9-M), Version 15.1(4)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2012 by Cisco Systems, Inc.
Compiled Tue 20-Mar-12 23:34 by prod_rel_team`,
},
want: []string{"cpe:2.3:o:cisco:ios:15.1(4)m4:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOS Vresion 15.5(3)M on Cisco 892J-K9-V02",
args: snmp.Result{
SysDescr0: `Cisco IOS Software, C890 Software (C890-UNIVERSALK9-M), Version 15.5(3)M, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2015 by Cisco Systems, Inc.
Compiled Thu 23-Jul-15 03:08 by prod_rel_team`,
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Cisco",
EntPhysicalName: "892",
EntPhysicalSoftwareRev: "15.5(3)M, RELEASE SOFTWARE (fc1)",
}},
},
want: []string{"cpe:2.3:h:cisco:892:-:*:*:*:*:*:*:*", "cpe:2.3:o:cisco:ios:15.5(3)m:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOS Version 15.4(3)M5 on Cisco C892FSP-K9-V02",
args: snmp.Result{
SysDescr0: `Cisco IOS Software, C800 Software (C800-UNIVERSALK9-M), Version 15.4(3)M5, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2016 by Cisco Systems, Inc.
Compiled Tue 09-Feb-16 06:15 by prod_rel_team`,
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Cisco",
EntPhysicalName: "C892FSP-K9",
EntPhysicalSoftwareRev: "15.4(3)M5, RELEASE SOFTWARE (fc1)",
}},
},
want: []string{"cpe:2.3:h:cisco:c892fsp-k9:-:*:*:*:*:*:*:*", "cpe:2.3:o:cisco:ios:15.4(3)m5:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOS Version 12.2(17d)SXB11",
args: snmp.Result{
SysDescr0: `Cisco Internetwork Operating System Software IOS (tm) s72033_rp Software (s72033_rp-JK9SV-M), Version 12.2(17d)SXB11, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2005 by cisco Systems, Inc.`,
},
want: []string{"cpe:2.3:o:cisco:ios:12.2(17d)sxb11:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOX-XE Version 16.12.4",
args: snmp.Result{
SysDescr0: `Cisco IOS Software [Gibraltar], Catalyst L3 Switch Software (CAT9K_LITE_IOSXE), Version 16.12.4, RELEASE SOFTWARE (fc5)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2020 by Cisco Systems, Inc.
Compiled Thu 09-Jul-20 19:31 by m`,
},
want: []string{"cpe:2.3:o:cisco:ios_xe:16.12.4:*:*:*:*:*:*:*"},
},
{
name: "Cisco IOX-XE Version 03.06.07.E",
args: snmp.Result{
SysDescr0: `Cisco IOS Software, IOS-XE Software, Catalyst 4500 L3 Switch Software (cat4500es8-UNIVERSALK9-M), Version 03.06.07.E RELEASE SOFTWARE (fc3)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2017 by Cisco Systems, Inc.
Compiled Wed`,
},
want: []string{"cpe:2.3:o:cisco:ios_xe:03.06.07.e:*:*:*:*:*:*:*"},
},
{
name: "Juniper SSG-5-SH-BT",
args: snmp.Result{
SysDescr0: "SSG5-ISDN version 6.3.0r14.0 (SN: 0000000000000001, Firewall+VPN)",
},
want: []string{"cpe:2.3:h:juniper:ssg5-isdn:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:screenos:6.3.0r14.0:*:*:*:*:*:*:*"},
},
{
name: "JUNOS 20.4R3-S4.8 on Juniper MX240",
args: snmp.Result{
SysDescr0: "Juniper Networks, Inc. mx240 internet router, kernel JUNOS 20.4R3-S4.8, Build date: 2022-08-16 20:42:11 UTC Copyright (c) 1996-2022 Juniper Networks, Inc.",
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Juniper Networks",
EntPhysicalName: "CHAS-BP3-MX240-S",
EntPhysicalSoftwareRev: "20.4R3-S4.8",
}},
},
want: []string{"cpe:2.3:h:juniper:mx240:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:junos:20.4r3-s4.8:*:*:*:*:*:*:*"},
},
{
name: "JUNOS 12.1X46-D65.4 on Juniper SRX220H",
args: snmp.Result{
SysDescr0: "Juniper Networks, Inc. srx220h internet router, kernel JUNOS 12.1X46-D65.4 #0: 2016-12-30 01:34:30 UTC builder@quoarth.juniper.net:/volume/build/junos/12.1/service/12.1X46-D65.4/obj-octeon/junos/bsd/kernels/JSRXNLE/kernel Build date: 2016-12-30 02:59",
},
want: []string{"cpe:2.3:h:juniper:srx220h:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:junos:12.1x46-d65.4:*:*:*:*:*:*:*"},
},
{
name: "JUNOS 12.3X48-D30.7 on Juniper SRX220H2",
args: snmp.Result{
SysDescr0: "Juniper Networks, Inc. srx220h2 internet router, kernel JUNOS 12.3X48-D30.7, Build date: 2016-04-29 00:01:04 UTC Copyright (c) 1996-2016 Juniper Networks, Inc.",
},
want: []string{"cpe:2.3:h:juniper:srx220h2:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:junos:12.3x48-d30.7:*:*:*:*:*:*:*"},
},
{
name: "JUNOS 20.4R3-S4.8 on Juniper SRX4600",
args: snmp.Result{
SysDescr0: "Juniper Networks, Inc. srx4600 internet router, kernel JUNOS 20.4R3-S4.8, Build date: 2022-08-16 20:42:11 UTC Copyright (c) 1996-2022 Juniper Networks, Inc.",
},
want: []string{"cpe:2.3:h:juniper:srx4600:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:junos:20.4r3-s4.8:*:*:*:*:*:*:*"},
},
{
name: "cpe:2.3:o:juniper:junos:20.4:r2-s2.2:*:*:*:*:*:*",
args: snmp.Result{
SysDescr0: "Juniper Networks, Inc. ex4300-32f Ethernet Switch, kernel JUNOS 20.4R3-S4.8, Build date: 2022-08-16 21:10:45 UTC Copyright (c) 1996-2022 Juniper Networks, Inc.",
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Juniper Networks",
EntPhysicalName: "",
EntPhysicalSoftwareRev: "20.4R3-S4.8",
}},
},
want: []string{"cpe:2.3:h:juniper:ex4300-32f:-:*:*:*:*:*:*:*", "cpe:2.3:o:juniper:junos:20.4r3-s4.8:*:*:*:*:*:*:*"},
},
{
name: "Arista Networks EOS version 4.28.4M on DCS-7050TX-64",
args: snmp.Result{
SysDescr0: "Arista Networks EOS version 4.28.4M running on an Arista Networks DCS-7050TX-64",
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Arista Networks",
EntPhysicalName: "",
EntPhysicalSoftwareRev: "4.28.4M",
}},
},
want: []string{"cpe:/h:arista:dcs-7050tx-64:-:*:*:*:*:*:*:*", "cpe:2.3:o:arista:eos:4.28.4m:*:*:*:*:*:*:*"},
},
{
name: "FortiGate-50E",
args: snmp.Result{
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Fortinet",
EntPhysicalName: "FGT_50E",
EntPhysicalSoftwareRev: "FortiGate-50E v5.4.6,build1165b1165,171018 (GA)",
}},
},
want: []string{"cpe:2.3:h:fortinet:fortigate-50e:-:*:*:*:*:*:*:*", "cpe:2.3:o:fortinet:fortios:5.4.6:*:*:*:*:*:*:*"},
},
{
name: "FortiGate-60F",
args: snmp.Result{
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Fortinet",
EntPhysicalName: "FGT_60F",
EntPhysicalSoftwareRev: "FortiGate-60F v6.4.11,build2030,221031 (GA.M)",
}},
},
want: []string{"cpe:2.3:h:fortinet:fortigate-60f:-:*:*:*:*:*:*:*", "cpe:2.3:o:fortinet:fortios:6.4.11:*:*:*:*:*:*:*"},
},
{
name: "YAMAHA RTX1000",
args: snmp.Result{
SysDescr0: "RTX1000 Rev.8.01.29 (Fri Apr 15 11:50:44 2011)",
},
want: []string{"cpe:2.3:h:yamaha:rtx1000:-:*:*:*:*:*:*:*", "cpe:2.3:o:yamaha:rtx1000:8.01.29:*:*:*:*:*:*:*"},
},
{
name: "YAMAHA RTX810",
args: snmp.Result{
SysDescr0: "RTX810 Rev.11.01.34 (Tue Nov 26 18:39:12 2019)",
},
want: []string{"cpe:2.3:h:yamaha:rtx810:-:*:*:*:*:*:*:*", "cpe:2.3:o:yamaha:rtx810:11.01.34:*:*:*:*:*:*:*"},
},
{
name: "NEC IX2105",
args: snmp.Result{
SysDescr0: "NEC Portable Internetwork Core Operating System Software, IX Series IX2105 (magellan-sec) Software, Version 8.8.22, RELEASE SOFTWARE, Compiled Jul 04-Wed-2012 14:18:46 JST #2, IX2105",
},
want: []string{"cpe:2.3:h:nec:ix2105:-:*:*:*:*:*:*:*", "cpe:2.3:o:nec:ix2105:8.8.22:*:*:*:*:*:*:*"},
},
{
name: "NEC IX2235",
args: snmp.Result{
SysDescr0: "NEC Portable Internetwork Core Operating System Software, IX Series IX2235 (magellan-sec) Software, Version 10.6.21, RELEASE SOFTWARE, Compiled Dec 15-Fri-YYYY HH:MM:SS JST #2, IX2235",
},
want: []string{"cpe:2.3:h:nec:ix2235:-:*:*:*:*:*:*:*", "cpe:2.3:o:nec:ix2235:10.6.21:*:*:*:*:*:*:*"},
},
{
name: "Palo Alto Networks PAN-OS 10.0.0 on PA-220",
args: snmp.Result{
SysDescr0: "Palo Alto Networks PA-220 series firewall",
EntPhysicalTables: map[int]snmp.EntPhysicalTable{1: {
EntPhysicalMfgName: "Palo Alto Networks",
EntPhysicalName: "PA-220",
EntPhysicalSoftwareRev: "10.0.0",
}},
},
want: []string{"cpe:2.3:h:paloaltonetworks:pa-220:-:*:*:*:*:*:*:*", "cpe:2.3:o:paloaltonetworks:pan-os:10.0.0:*:*:*:*:*:*:*"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
opts := []cmp.Option{
cmpopts.SortSlices(func(i, j string) bool {
return i < j
}),
}
if diff := cmp.Diff(cpe.Convert(tt.args), tt.want, opts...); diff != "" {
t.Errorf("Convert() value is mismatch (-got +want):%s\n", diff)
}
})
}
}

View File

@@ -0,0 +1,131 @@
package snmp
import (
"log"
"strconv"
"strings"
"time"
"github.com/gosnmp/gosnmp"
"github.com/pkg/errors"
)
type options struct {
community string
debug bool
}
// Option is a functional option for Get
type Option interface {
apply(*options)
}
type communityOption string
func (c communityOption) apply(opts *options) {
opts.community = string(c)
}
// WithCommunity sets the SNMPv1/v2c community string
func WithCommunity(c string) Option {
return communityOption(c)
}
type debugOption bool
func (d debugOption) apply(opts *options) {
opts.debug = bool(d)
}
// WithDebug enables debug logging of SNMP responses
func WithDebug(d bool) Option {
return debugOption(d)
}
// Get queries sysDescr.0 and entPhysicalTable entries from the target host via SNMP
func Get(version gosnmp.SnmpVersion, ipaddr string, opts ...Option) (Result, error) {
var options options
for _, o := range opts {
o.apply(&options)
}
r := Result{SysDescr0: "", EntPhysicalTables: map[int]EntPhysicalTable{}}
params := &gosnmp.GoSNMP{
Target: ipaddr,
Port: 161,
Version: version,
Timeout: time.Duration(2) * time.Second,
Retries: 3,
ExponentialTimeout: true,
MaxOids: gosnmp.MaxOids,
}
switch version {
case gosnmp.Version1, gosnmp.Version2c:
params.Community = options.community
case gosnmp.Version3:
return Result{}, errors.New("not implemented")
}
if err := params.Connect(); err != nil {
return Result{}, errors.Wrap(err, "failed to connect")
}
defer params.Conn.Close()
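// The OIDs below are sysDescr.0 (1.3.6.1.2.1.1.1.0) and, from the ENTITY-MIB entPhysicalTable,
// entPhysicalMfgName.1 (1.3.6.1.2.1.47.1.1.1.1.12.1), entPhysicalName.1 (1.3.6.1.2.1.47.1.1.1.1.7.1)
// and entPhysicalSoftwareRev.1 (1.3.6.1.2.1.47.1.1.1.1.10.1) for the first physical entity.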
for _, oid := range []string{"1.3.6.1.2.1.1.1.0", "1.3.6.1.2.1.47.1.1.1.1.12.1", "1.3.6.1.2.1.47.1.1.1.1.7.1", "1.3.6.1.2.1.47.1.1.1.1.10.1"} {
resp, err := params.Get([]string{oid})
if err != nil {
return Result{}, errors.Wrap(err, "send SNMP GET request")
}
for _, v := range resp.Variables {
if options.debug {
switch v.Type {
case gosnmp.OctetString:
log.Printf("DEBUG: %s -> %s", v.Name, string(v.Value.([]byte)))
default:
log.Printf("DEBUG: %s -> %v", v.Name, v.Value)
}
}
switch {
case v.Name == ".1.3.6.1.2.1.1.1.0":
if v.Type == gosnmp.OctetString {
r.SysDescr0 = string(v.Value.([]byte))
}
case strings.HasPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.12."):
i, err := strconv.Atoi(strings.TrimPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.12."))
if err != nil {
return Result{}, errors.Wrap(err, "failed to get index")
}
if v.Type == gosnmp.OctetString {
b := r.EntPhysicalTables[i]
b.EntPhysicalMfgName = string(v.Value.([]byte))
r.EntPhysicalTables[i] = b
}
case strings.HasPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.7."):
i, err := strconv.Atoi(strings.TrimPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.7."))
if err != nil {
return Result{}, errors.Wrap(err, "failed to get index")
}
if v.Type == gosnmp.OctetString {
b := r.EntPhysicalTables[i]
b.EntPhysicalName = string(v.Value.([]byte))
r.EntPhysicalTables[i] = b
}
case strings.HasPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.10."):
i, err := strconv.Atoi(strings.TrimPrefix(v.Name, ".1.3.6.1.2.1.47.1.1.1.1.10."))
if err != nil {
return Result{}, errors.Wrap(err, "failed to get index")
}
if v.Type == gosnmp.OctetString {
b := r.EntPhysicalTables[i]
b.EntPhysicalSoftwareRev = string(v.Value.([]byte))
r.EntPhysicalTables[i] = b
}
}
}
}
return r, nil
}
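A minimal usage sketch of Get with the functional options above; the target address and community string are placeholders, not values from this change:

package main

import (
	"fmt"
	"log"

	"github.com/gosnmp/gosnmp"

	"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/snmp"
)

func main() {
	// 192.0.2.1 and "public" are placeholder values.
	r, err := snmp.Get(gosnmp.Version2c, "192.0.2.1", snmp.WithCommunity("public"), snmp.WithDebug(false))
	if err != nil {
		log.Fatalf("SNMP GET failed: %+v", err)
	}
	fmt.Println(r.SysDescr0)
}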

View File

@@ -0,0 +1,14 @@
package snmp
// Result holds the SNMP values used to determine CPEs
type Result struct {
SysDescr0 string `json:"sysDescr0,omitempty"`
EntPhysicalTables map[int]EntPhysicalTable `json:"entPhysicalTables,omitempty"`
}
// EntPhysicalTable holds entPhysicalTable values (manufacturer, name, software revision)
type EntPhysicalTable struct {
EntPhysicalMfgName string `json:"entPhysicalMfgName,omitempty"`
EntPhysicalName string `json:"entPhysicalName,omitempty"`
EntPhysicalSoftwareRev string `json:"entPhysicalSoftwareRev,omitempty"`
}

View File

@@ -0,0 +1,12 @@
package util
import "golang.org/x/exp/maps"
// Unique returns the unique elements of s
func Unique[T comparable](s []T) []T {
m := map[T]struct{}{}
for _, v := range s {
m[v] = struct{}{}
}
return maps.Keys(m)
}
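Note that maps.Keys returns keys in an unspecified order, so Unique does not preserve input order; callers that need determinism sort afterwards (as the test above does with cmpopts.SortSlices). A minimal usage sketch, assuming the package lives under contrib/snmp2cpe/pkg/util:

package main

import (
	"fmt"
	"sort"

	"github.com/future-architect/vuls/contrib/snmp2cpe/pkg/util"
)

func main() {
	cpes := util.Unique([]string{"a", "b", "a"})
	sort.Strings(cpes) // order from Unique is unspecified
	fmt.Println(cpes)  // [a b]
}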

View File

@@ -5,12 +5,11 @@ import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/contrib/trivy/parser"
"github.com/future-architect/vuls/models"
"github.com/spf13/cobra"
)
@@ -34,45 +33,55 @@ func main() {
reader := bufio.NewReader(os.Stdin)
buf := new(bytes.Buffer)
if _, err = buf.ReadFrom(reader); err != nil {
fmt.Printf("Failed to read file. err: %+v\n", err)
os.Exit(1)
return
}
trivyJSON = buf.Bytes()
} else {
if trivyJSON, err = ioutil.ReadFile(jsonFilePath); err != nil {
fmt.Println("Failed to read file", err)
if trivyJSON, err = os.ReadFile(jsonFilePath); err != nil {
fmt.Printf("Failed to read file. err: %+v\n", err)
os.Exit(1)
return
}
}
scanResult := &models.ScanResult{
JSONVersion: models.JSONVersion,
ScannedCves: models.VulnInfos{},
}
if scanResult, err = parser.Parse(trivyJSON, scanResult); err != nil {
fmt.Println("Failed to execute command", err)
parser, err := parser.NewParser(trivyJSON)
if err != nil {
fmt.Printf("Failed to new parser. err: %+v\n", err)
os.Exit(1)
}
scanResult, err := parser.Parse(trivyJSON)
if err != nil {
fmt.Printf("Failed to parse. err: %+v\n", err)
os.Exit(1)
return
}
var resultJSON []byte
if resultJSON, err = json.MarshalIndent(scanResult, "", " "); err != nil {
fmt.Println("Failed to create json", err)
fmt.Printf("Failed to create json. err: %+v\n", err)
os.Exit(1)
return
}
fmt.Println(string(resultJSON))
return
},
}
var cmdVersion = &cobra.Command{
Use: "version",
Short: "Show version",
Long: "Show version",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf("trivy-to-vuls-%s-%s\n", config.Version, config.Revision)
},
}
cmdTrivyToVuls.Flags().BoolVarP(&stdIn, "stdin", "s", false, "input from stdin")
cmdTrivyToVuls.Flags().StringVarP(&jsonDir, "trivy-json-dir", "d", "./", "trivy json dir")
cmdTrivyToVuls.Flags().StringVarP(&jsonFileName, "trivy-json-file-name", "f", "results.json", "trivy json file name")
var rootCmd = &cobra.Command{Use: "trivy-to-vuls"}
rootCmd.AddCommand(cmdTrivyToVuls)
rootCmd.AddCommand(cmdVersion)
if err = rootCmd.Execute(); err != nil {
fmt.Println("Failed to execute command", err)
fmt.Printf("Failed to execute command. err: %+v\n", err)
os.Exit(1)
}
os.Exit(0)
}

View File

@@ -2,163 +2,32 @@ package parser
import (
"encoding/json"
"sort"
"time"
"github.com/aquasecurity/fanal/analyzer/os"
"github.com/aquasecurity/trivy/pkg/report"
"github.com/aquasecurity/trivy/pkg/types"
v2 "github.com/future-architect/vuls/contrib/trivy/parser/v2"
"github.com/future-architect/vuls/models"
"golang.org/x/xerrors"
)
// Parse :
func Parse(vulnJSON []byte, scanResult *models.ScanResult) (result *models.ScanResult, err error) {
var trivyResults report.Results
if err = json.Unmarshal(vulnJSON, &trivyResults); err != nil {
return nil, err
}
pkgs := models.Packages{}
vulnInfos := models.VulnInfos{}
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
for _, trivyResult := range trivyResults {
for _, vuln := range trivyResult.Vulnerabilities {
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
CveID: vuln.VulnerabilityID,
Confidences: models.Confidences{
{
Score: 100,
DetectionMethod: models.TrivyMatchStr,
},
},
AffectedPackages: models.PackageFixStatuses{},
CveContents: models.CveContents{},
LibraryFixedIns: models.LibraryFixedIns{},
// VulnType : "",
}
}
vulnInfo := vulnInfos[vuln.VulnerabilityID]
var notFixedYet bool
fixState := ""
if len(vuln.FixedVersion) == 0 {
notFixedYet = true
fixState = "Affected"
}
var references models.References
for _, reference := range vuln.References {
references = append(references, models.Reference{
Source: "trivy",
Link: reference,
})
}
sort.Slice(references, func(i, j int) bool {
return references[i].Link < references[j].Link
})
vulnInfo.CveContents = models.CveContents{
models.Trivy: models.CveContent{
Cvss3Severity: vuln.Severity,
References: references,
Title: vuln.Title,
Summary: vuln.Description,
},
}
// do only if image type is Vuln
if IsTrivySupportedOS(trivyResult.Type) {
pkgs[vuln.PkgName] = models.Package{
Name: vuln.PkgName,
Version: vuln.InstalledVersion,
}
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
Name: vuln.PkgName,
NotFixedYet: notFixedYet,
FixState: fixState,
FixedIn: vuln.FixedVersion,
})
// overwrite every time if os package
scanResult.Family = trivyResult.Type
scanResult.ServerName = trivyResult.Target
scanResult.Optional = map[string]interface{}{
"trivy-target": trivyResult.Target,
}
scanResult.ScannedAt = time.Now()
scanResult.ScannedBy = "trivy"
scanResult.ScannedVia = "trivy"
} else {
// Library scan results
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
Key: trivyResult.Type,
Name: vuln.PkgName,
Path: trivyResult.Target,
FixedIn: vuln.FixedVersion,
})
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
libScanner.Libs = append(libScanner.Libs, types.Library{
Name: vuln.PkgName,
Version: vuln.InstalledVersion,
})
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
}
vulnInfos[vuln.VulnerabilityID] = vulnInfo
}
}
// flatten and unique libraries
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
for path, v := range uniqueLibraryScannerPaths {
uniqueLibrary := map[string]types.Library{}
for _, lib := range v.Libs {
uniqueLibrary[lib.Name+lib.Version] = lib
}
var libraries []types.Library
for _, library := range uniqueLibrary {
libraries = append(libraries, library)
}
sort.Slice(libraries, func(i, j int) bool {
return libraries[i].Name < libraries[j].Name
})
libscanner := models.LibraryScanner{
Path: path,
Libs: libraries,
}
libraryScanners = append(libraryScanners, libscanner)
}
sort.Slice(libraryScanners, func(i, j int) bool {
return libraryScanners[i].Path < libraryScanners[j].Path
})
scanResult.ScannedCves = vulnInfos
scanResult.Packages = pkgs
scanResult.LibraryScanners = libraryScanners
return scanResult, nil
// Parser is a parser interface
type Parser interface {
Parse(vulnJSON []byte) (result *models.ScanResult, err error)
}
// IsTrivySupportedOS :
func IsTrivySupportedOS(family string) bool {
supportedFamilies := []string{
os.RedHat,
os.Debian,
os.Ubuntu,
os.CentOS,
os.Fedora,
os.Amazon,
os.Oracle,
os.Windows,
os.OpenSUSE,
os.OpenSUSELeap,
os.OpenSUSETumbleweed,
os.SLES,
os.Photon,
os.Alpine,
}
for _, supportedFamily := range supportedFamilies {
if family == supportedFamily {
return true
}
}
return false
// Report is used for judging the schema version of trivy
type Report struct {
SchemaVersion int `json:",omitempty"`
}
// NewParser makes a parser for the schema version of trivy
func NewParser(vulnJSON []byte) (Parser, error) {
r := Report{}
if err := json.Unmarshal(vulnJSON, &r); err != nil {
return nil, xerrors.Errorf("Failed to parse JSON. Please use the latest version of trivy, trivy-to-vuls and future-vuls")
}
switch r.SchemaVersion {
case 2:
return v2.ParserV2{}, nil
default:
return nil, xerrors.Errorf("Failed to parse trivy json. SchemeVersion %d is not supported yet. Please contact support", r.SchemaVersion)
}
}
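A minimal sketch of driving NewParser, mirroring the trivy-to-vuls command above; the input file name is a placeholder:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/future-architect/vuls/contrib/trivy/parser"
)

func main() {
	trivyJSON, err := os.ReadFile("results.json") // placeholder path
	if err != nil {
		log.Fatalf("failed to read file: %+v", err)
	}
	p, err := parser.NewParser(trivyJSON) // picks ParserV2 when SchemaVersion == 2
	if err != nil {
		log.Fatalf("failed to new parser: %+v", err)
	}
	scanResult, err := p.Parse(trivyJSON)
	if err != nil {
		log.Fatalf("failed to parse: %+v", err)
	}
	fmt.Println(scanResult.ServerName)
}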

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,79 @@
package v2
import (
"encoding/json"
"fmt"
"regexp"
"time"
"github.com/aquasecurity/trivy/pkg/types"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/contrib/trivy/pkg"
"github.com/future-architect/vuls/models"
)
// ParserV2 is a parser for schema version 2
type ParserV2 struct {
}
// Parse parses trivy's JSON and converts it to the Vuls ScanResult struct
func (p ParserV2) Parse(vulnJSON []byte) (result *models.ScanResult, err error) {
var report types.Report
if err = json.Unmarshal(vulnJSON, &report); err != nil {
return nil, err
}
scanResult, err := pkg.Convert(report.Results)
if err != nil {
return nil, err
}
if err := setScanResultMeta(scanResult, &report); err != nil {
return nil, err
}
return scanResult, nil
}
var dockerTagPattern = regexp.MustCompile(`^(.*):(.*)$`)
func setScanResultMeta(scanResult *models.ScanResult, report *types.Report) error {
if len(report.Results) == 0 {
return xerrors.Errorf("scanned images or libraries are not supported by Trivy. see https://aquasecurity.github.io/trivy/dev/vulnerability/detection/os/, https://aquasecurity.github.io/trivy/dev/vulnerability/detection/language/")
}
scanResult.ServerName = report.ArtifactName
if report.ArtifactType == "container_image" {
matches := dockerTagPattern.FindStringSubmatch(report.ArtifactName)
var imageName, imageTag string
if 2 < len(matches) {
// including the image tag
imageName = matches[1]
imageTag = matches[2]
} else {
// no image tag
imageName = report.ArtifactName
imageTag = "latest" // Complement if the tag is omitted
}
scanResult.ServerName = fmt.Sprintf("%s:%s", imageName, imageTag)
if scanResult.Optional == nil {
scanResult.Optional = map[string]interface{}{}
}
scanResult.Optional["TRIVY_IMAGE_NAME"] = imageName
scanResult.Optional["TRIVY_IMAGE_TAG"] = imageTag
}
if report.Metadata.OS != nil {
scanResult.Family = report.Metadata.OS.Family
scanResult.Release = report.Metadata.OS.Name
} else {
scanResult.Family = constant.ServerTypePseudo
}
scanResult.ScannedAt = time.Now()
scanResult.ScannedBy = "trivy"
scanResult.ScannedVia = "trivy"
return nil
}
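As a worked example of the tag handling above: the greedy regexp splits on the last colon, so artifact names resolve as in this standalone sketch (values taken from the tests below):

package main

import (
	"fmt"
	"regexp"
)

var dockerTagPattern = regexp.MustCompile(`^(.*):(.*)$`)

func main() {
	for _, name := range []string{"quay.io/fluentd_elasticsearch/fluentd:v2.9.0", "redis"} {
		if m := dockerTagPattern.FindStringSubmatch(name); 2 < len(m) {
			fmt.Printf("%s -> image=%s tag=%s\n", name, m[1], m[2])
		} else {
			fmt.Printf("%s -> image=%s tag=latest (tag omitted)\n", name, name)
		}
	}
	// quay.io/fluentd_elasticsearch/fluentd:v2.9.0 -> image=quay.io/fluentd_elasticsearch/fluentd tag=v2.9.0
	// redis -> image=redis tag=latest (tag omitted)
}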

View File

@@ -0,0 +1,805 @@
package v2
import (
"testing"
"github.com/d4l3k/messagediff"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/models"
)
func TestParse(t *testing.T) {
cases := map[string]struct {
vulnJSON []byte
expected *models.ScanResult
}{
"image redis": {
vulnJSON: redisTrivy,
expected: redisSR,
},
"image struts": {
vulnJSON: strutsTrivy,
expected: strutsSR,
},
"image osAndLib": {
vulnJSON: osAndLibTrivy,
expected: osAndLibSR,
},
}
for testcase, v := range cases {
actual, err := ParserV2{}.Parse(v.vulnJSON)
if err != nil {
t.Errorf("%s", err)
}
diff, equal := messagediff.PrettyDiff(
v.expected,
actual,
messagediff.IgnoreStructField("ScannedAt"),
messagediff.IgnoreStructField("Title"),
messagediff.IgnoreStructField("Summary"),
messagediff.IgnoreStructField("LastModified"),
messagediff.IgnoreStructField("Published"),
)
if !equal {
t.Errorf("test: %s, diff %s", testcase, diff)
}
}
}
var redisTrivy = []byte(`
{
"SchemaVersion": 2,
"ArtifactName": "redis",
"ArtifactType": "container_image",
"Metadata": {
"OS": {
"Family": "debian",
"Name": "10.10"
},
"ImageID": "sha256:ddcca4b8a6f0367b5de2764dfe76b0a4bfa6d75237932185923705da47004347",
"DiffIDs": [
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
],
"RepoTags": [
"redis:latest"
],
"RepoDigests": [
"redis@sha256:66ce9bc742609650afc3de7009658473ed601db4e926a5b16d239303383bacad"
],
"ImageConfig": {
"architecture": "amd64",
"container": "fa59f1c2817c9095f8f7272a4ab9b11db0332b33efb3a82c00a3d1fec8763684",
"created": "2021-08-17T14:30:06.550779326Z",
"docker_version": "20.10.7",
"history": [
{
"created": "2021-08-17T01:24:06Z",
"created_by": "/bin/sh -c #(nop) ADD file:87b4e60fe3af680c6815448374365a44e9ea461bc8ade2960b4639c25aed3ba9 in / "
},
{
"created": "2021-08-17T14:30:06Z",
"created_by": "/bin/sh -c #(nop) CMD [\"redis-server\"]",
"empty_layer": true
}
],
"os": "linux",
"rootfs": {
"type": "layers",
"diff_ids": [
"sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781",
"sha256:b6fc243eaea74d1a41b242da4c3ec5166db80f38c4d57a10ce8860c00d902ace",
"sha256:ec92e47b7c52dacc26df07ee13e8e81c099b5a5661ccc97b06692a9c9d01e772",
"sha256:4be6d4460d3615186717f21ffc0023b168dce48967d01934bbe31127901d3d5c",
"sha256:992463b683270e164936e9c48fa395d05a7b8b5cc0aa208e4fa81aa9158fcae1",
"sha256:0083597d42d190ddb86c35587a7b196fe18d79382520544b5f715c1e4792b19a"
]
},
"config": {
"Cmd": [
"redis-server"
],
"Entrypoint": [
"docker-entrypoint.sh"
],
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"GOSU_VERSION=1.12",
"REDIS_VERSION=6.2.5",
"REDIS_DOWNLOAD_URL=http://download.redis.io/releases/redis-6.2.5.tar.gz",
"REDIS_DOWNLOAD_SHA=4b9a75709a1b74b3785e20a6c158cab94cf52298aa381eea947a678a60d551ae"
],
"Image": "sha256:befbd3fc62bffcd0115008969a014faaad07828b2c54b4bcfd2d9fc3aa2508cd",
"Volumes": {
"/data": {}
},
"WorkingDir": "/data"
}
}
},
"Results": [
{
"Target": "redis (debian 10.10)",
"Class": "os-pkgs",
"Type": "debian",
"Packages": [
{
"Name": "adduser",
"Version": "3.118",
"SrcName": "adduser",
"SrcVersion": "3.118",
"Layer": {
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
}
},
{
"Name": "apt",
"Version": "1.8.2.3",
"SrcName": "apt",
"SrcVersion": "1.8.2.3",
"Layer": {
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
}
},
{
"Name": "bsdutils",
"Version": "1:2.33.1-0.1",
"SrcName": "util-linux",
"SrcVersion": "2.33.1-0.1",
"Layer": {
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
}
},
{
"Name": "pkgA",
"Version": "1:2.33.1-0.1",
"SrcName": "util-linux",
"SrcVersion": "2.33.1-0.1",
"Layer": {
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
}
}
],
"Vulnerabilities": [
{
"VulnerabilityID": "CVE-2011-3374",
"PkgName": "apt",
"InstalledVersion": "1.8.2.3",
"Layer": {
"DiffID": "sha256:f68ef921efae588b3dd5cc466a1ca9c94c24785f1fa9420bea15ecc2dedbe781"
},
"SeveritySource": "debian",
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2011-3374",
"Description": "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
"Severity": "LOW",
"CweIDs": [
"CWE-347"
],
"CVSS": {
"nvd": {
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:L/A:N",
"V2Score": 4.3,
"V3Score": 3.7
}
},
"References": [
"https://access.redhat.com/security/cve/cve-2011-3374"
],
"PublishedDate": "2019-11-26T00:15:00Z",
"LastModifiedDate": "2021-02-09T16:08:00Z"
}
]
}
]
}
`)
var redisSR = &models.ScanResult{
JSONVersion: 4,
ServerName: "redis:latest",
Family: "debian",
Release: "10.10",
ScannedBy: "trivy",
ScannedVia: "trivy",
ScannedCves: models.VulnInfos{
"CVE-2011-3374": {
CveID: "CVE-2011-3374",
Confidences: models.Confidences{
models.Confidence{
Score: 100,
DetectionMethod: "TrivyMatch",
},
},
AffectedPackages: models.PackageFixStatuses{
models.PackageFixStatus{
Name: "apt",
NotFixedYet: true,
FixState: "Affected",
FixedIn: "",
}},
CveContents: models.CveContents{
"trivy": []models.CveContent{{
Title: "",
Summary: "It was found that apt-key in apt, all versions, do not correctly validate gpg keys with the master keyring, leading to a potential man-in-the-middle attack.",
Cvss3Severity: "LOW",
References: models.References{
{Source: "trivy", Link: "https://access.redhat.com/security/cve/cve-2011-3374"},
},
}},
},
LibraryFixedIns: models.LibraryFixedIns{},
},
},
LibraryScanners: models.LibraryScanners{},
Packages: models.Packages{
"apt": models.Package{
Name: "apt",
Version: "1.8.2.3",
},
"adduser": models.Package{
Name: "adduser",
Version: "3.118",
},
"bsdutils": models.Package{
Name: "bsdutils",
Version: "1:2.33.1-0.1",
},
"pkgA": models.Package{
Name: "pkgA",
Version: "1:2.33.1-0.1",
},
},
SrcPackages: models.SrcPackages{
"util-linux": models.SrcPackage{
Name: "util-linux",
Version: "2.33.1-0.1",
BinaryNames: []string{"bsdutils", "pkgA"},
},
},
Optional: map[string]interface{}{
"TRIVY_IMAGE_NAME": "redis",
"TRIVY_IMAGE_TAG": "latest",
},
}
var strutsTrivy = []byte(`
{
"SchemaVersion": 2,
"ArtifactName": "/data/struts-1.2.7/lib",
"ArtifactType": "filesystem",
"Metadata": {
"ImageConfig": {
"architecture": "",
"created": "0001-01-01T00:00:00Z",
"os": "",
"rootfs": {
"type": "",
"diff_ids": null
},
"config": {}
}
},
"Results": [
{
"Target": "Java",
"Class": "lang-pkgs",
"Type": "jar",
"Packages": [
{
"Name": "oro:oro",
"Version": "2.0.7",
"Layer": {}
},
{
"Name": "struts:struts",
"Version": "1.2.7",
"Layer": {}
},
{
"Name": "commons-beanutils:commons-beanutils",
"Version": "1.7.0",
"Layer": {}
}
],
"Vulnerabilities": [
{
"VulnerabilityID": "CVE-2014-0114",
"PkgName": "commons-beanutils:commons-beanutils",
"InstalledVersion": "1.7.0",
"FixedVersion": "1.9.2",
"Layer": {},
"SeveritySource": "nvd",
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2014-0114",
"Title": "Apache Struts 1: Class Loader manipulation via request parameters",
"Description": "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
"Severity": "HIGH",
"CweIDs": [
"CWE-20"
],
"CVSS": {
"nvd": {
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
"V2Score": 7.5
},
"redhat": {
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
"V2Score": 7.5
}
},
"References": [
"http://advisories.mageia.org/MGASA-2014-0219.html"
],
"PublishedDate": "2014-04-30T10:49:00Z",
"LastModifiedDate": "2021-01-26T18:15:00Z"
},
{
"VulnerabilityID": "CVE-2012-1007",
"PkgName": "struts:struts",
"InstalledVersion": "1.2.7",
"Layer": {},
"SeveritySource": "nvd",
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2012-1007",
"Title": "struts: multiple XSS flaws",
"Description": "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
"Severity": "MEDIUM",
"CweIDs": [
"CWE-79"
],
"CVSS": {
"nvd": {
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
"V2Score": 4.3
},
"redhat": {
"V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
"V2Score": 4.3
}
},
"References": [
"https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"
],
"PublishedDate": "2012-02-07T04:09:00Z",
"LastModifiedDate": "2018-10-17T01:29:00Z"
}
]
}
]
}`)
var strutsSR = &models.ScanResult{
JSONVersion: 4,
ServerName: "/data/struts-1.2.7/lib",
Family: "pseudo",
ScannedBy: "trivy",
ScannedVia: "trivy",
ScannedCves: models.VulnInfos{
"CVE-2014-0114": {
CveID: "CVE-2014-0114",
Confidences: models.Confidences{
models.Confidence{
Score: 100,
DetectionMethod: "TrivyMatch",
},
},
CveContents: models.CveContents{
"trivy": []models.CveContent{{
Title: "Apache Struts 1: Class Loader manipulation via request parameters",
Summary: "Apache Commons BeanUtils, as distributed in lib/commons-beanutils-1.8.0.jar in Apache Struts 1.x through 1.3.10 and in other products requiring commons-beanutils through 1.9.2, does not suppress the class property, which allows remote attackers to \"manipulate\" the ClassLoader and execute arbitrary code via the class parameter, as demonstrated by the passing of this parameter to the getClass method of the ActionForm object in Struts 1.",
Cvss3Severity: "HIGH",
References: models.References{
{Source: "trivy", Link: "http://advisories.mageia.org/MGASA-2014-0219.html"},
},
}},
},
LibraryFixedIns: models.LibraryFixedIns{
models.LibraryFixedIn{
Key: "jar",
Name: "commons-beanutils:commons-beanutils",
FixedIn: "1.9.2",
//TODO use Artifactname?
Path: "Java",
},
},
AffectedPackages: models.PackageFixStatuses{},
},
"CVE-2012-1007": {
CveID: "CVE-2012-1007",
Confidences: models.Confidences{
models.Confidence{
Score: 100,
DetectionMethod: "TrivyMatch",
},
},
CveContents: models.CveContents{
"trivy": []models.CveContent{{
Title: "struts: multiple XSS flaws",
Summary: "Multiple cross-site scripting (XSS) vulnerabilities in Apache Struts 1.3.10 allow remote attackers to inject arbitrary web script or HTML via (1) the name parameter to struts-examples/upload/upload-submit.do, or the message parameter to (2) struts-cookbook/processSimple.do or (3) struts-cookbook/processDyna.do.",
Cvss3Severity: "MEDIUM",
References: models.References{
{Source: "trivy", Link: "https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-1007"},
},
}},
},
LibraryFixedIns: models.LibraryFixedIns{
models.LibraryFixedIn{
Key: "jar",
Name: "struts:struts",
FixedIn: "",
//TODO use Artifactname?
Path: "Java",
},
},
AffectedPackages: models.PackageFixStatuses{},
},
},
LibraryScanners: models.LibraryScanners{
models.LibraryScanner{
Type: "jar",
LockfilePath: "Java",
Libs: []models.Library{
{
Name: "commons-beanutils:commons-beanutils",
Version: "1.7.0",
},
{
Name: "oro:oro",
Version: "2.0.7",
},
{
Name: "struts:struts",
Version: "1.2.7",
},
},
},
},
Packages: models.Packages{},
SrcPackages: models.SrcPackages{},
Optional: nil,
}
var osAndLibTrivy = []byte(`
{
"SchemaVersion": 2,
"ArtifactName": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
"ArtifactType": "container_image",
"Metadata": {
"OS": {
"Family": "debian",
"Name": "10.2"
},
"ImageID": "sha256:5a992077baba51b97f27591a10d54d2f2723dc9c81a3fe419e261023f2554933",
"DiffIDs": [
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
],
"RepoTags": [
"quay.io/fluentd_elasticsearch/fluentd:v2.9.0"
],
"RepoDigests": [
"quay.io/fluentd_elasticsearch/fluentd@sha256:54716d825ec9791ffb403ac17a1e82159c98ac6161e02b2a054595ad01aa6726"
],
"ImageConfig": {
"architecture": "amd64",
"container": "232f3fc7ddffd71dc3ff52c6c0c3a5feea2f51acffd9b53850a8fc6f1a15319a",
"created": "2020-03-04T13:59:39.161374106Z",
"docker_version": "19.03.4",
"history": [
{
"created": "2020-03-04T13:59:39.161374106Z",
"created_by": "/bin/sh -c #(nop) CMD [\"/run.sh\"]",
"empty_layer": true
}
],
"os": "linux",
"rootfs": {
"type": "layers",
"diff_ids": [
"sha256:25165eb51d15842f870f97873e0a58409d5e860e6108e3dd829bd10e484c0065"
]
},
"config": {
"Cmd": [
"/run.sh"
],
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
],
"Image": "sha256:2a538358cddc4824e9eff1531e0c63ae5e3cda85d2984c647df9b1c816b9b86b",
"ExposedPorts": {
"80/tcp": {}
}
}
}
},
"Results": [
{
"Target": "quay.io/fluentd_elasticsearch/fluentd:v2.9.0 (debian 10.2)",
"Class": "os-pkgs",
"Type": "debian",
"Packages": [
{
"Name": "libgnutls30",
"Version": "3.6.7-4",
"SrcName": "gnutls28",
"SrcVersion": "3.6.7-4",
"Layer": {
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
}
}
],
"Vulnerabilities": [
{
"VulnerabilityID": "CVE-2021-20231",
"PkgName": "libgnutls30",
"InstalledVersion": "3.6.7-4",
"FixedVersion": "3.6.7-4+deb10u7",
"Layer": {
"Digest": "sha256:000eee12ec04cc914bf96e8f5dee7767510c2aca3816af6078bd9fbe3150920c",
"DiffID": "sha256:831c5620387fb9efec59fc82a42b948546c6be601e3ab34a87108ecf852aa15f"
},
"SeveritySource": "nvd",
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2021-20231",
"Title": "gnutls: Use after free in client key_share extension",
"Description": "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
"Severity": "CRITICAL",
"CweIDs": [
"CWE-416"
],
"CVSS": {
"nvd": {
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"V2Score": 7.5,
"V3Score": 9.8
},
"redhat": {
"V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:N/A:L",
"V3Score": 3.7
}
},
"References": [
"https://bugzilla.redhat.com/show_bug.cgi?id=1922276"
],
"PublishedDate": "2021-03-12T19:15:00Z",
"LastModifiedDate": "2021-06-01T14:07:00Z"
}
]
},
{
"Target": "Ruby",
"Class": "lang-pkgs",
"Type": "gemspec",
"Packages": [
{
"Name": "activesupport",
"Version": "6.0.2.1",
"License": "MIT",
"Layer": {
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
},
"FilePath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec"
}
],
"Vulnerabilities": [
{
"VulnerabilityID": "CVE-2020-8165",
"PkgName": "activesupport",
"PkgPath": "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
"InstalledVersion": "6.0.2.1",
"FixedVersion": "6.0.3.1, 5.2.4.3",
"Layer": {
"Digest": "sha256:a8877cad19f14a7044524a145ce33170085441a7922458017db1631dcd5f7602",
"DiffID": "sha256:75e43d55939745950bc3f8fad56c5834617c4339f0f54755e69a0dd5372624e9"
},
"SeveritySource": "nvd",
"PrimaryURL": "https://avd.aquasec.com/nvd/cve-2020-8165",
"Title": "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
"Description": "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
"Severity": "CRITICAL",
"CweIDs": [
"CWE-502"
],
"CVSS": {
"nvd": {
"V2Vector": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"V2Score": 7.5,
"V3Score": 9.8
},
"redhat": {
"V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"V3Score": 9.8
}
},
"References": [
"https://www.debian.org/security/2020/dsa-4766"
],
"PublishedDate": "2020-06-19T18:15:00Z",
"LastModifiedDate": "2020-10-17T12:15:00Z"
}
]
}
]
}`)
var osAndLibSR = &models.ScanResult{
JSONVersion: 4,
ServerName: "quay.io/fluentd_elasticsearch/fluentd:v2.9.0",
Family: "debian",
Release: "10.2",
ScannedBy: "trivy",
ScannedVia: "trivy",
ScannedCves: models.VulnInfos{
"CVE-2021-20231": {
CveID: "CVE-2021-20231",
Confidences: models.Confidences{
models.Confidence{
Score: 100,
DetectionMethod: "TrivyMatch",
},
},
AffectedPackages: models.PackageFixStatuses{
models.PackageFixStatus{
Name: "libgnutls30",
NotFixedYet: false,
FixState: "",
FixedIn: "3.6.7-4+deb10u7",
}},
CveContents: models.CveContents{
"trivy": []models.CveContent{{
Title: "gnutls: Use after free in client key_share extension",
Summary: "A flaw was found in gnutls. A use after free issue in client sending key_share extension may lead to memory corruption and other consequences.",
Cvss3Severity: "CRITICAL",
References: models.References{
{Source: "trivy", Link: "https://bugzilla.redhat.com/show_bug.cgi?id=1922276"},
},
}},
},
LibraryFixedIns: models.LibraryFixedIns{},
},
"CVE-2020-8165": {
CveID: "CVE-2020-8165",
Confidences: models.Confidences{
models.Confidence{
Score: 100,
DetectionMethod: "TrivyMatch",
},
},
AffectedPackages: models.PackageFixStatuses{},
CveContents: models.CveContents{
"trivy": []models.CveContent{{
Title: "rubygem-activesupport: potentially unintended unmarshalling of user-provided objects in MemCacheStore and RedisCacheStore",
Summary: "A deserialization of untrusted data vulnernerability exists in rails \u003c 5.2.4.3, rails \u003c 6.0.3.1 that can allow an attacker to unmarshal user-provided objects in MemCacheStore and RedisCacheStore potentially resulting in an RCE.",
Cvss3Severity: "CRITICAL",
References: models.References{
{Source: "trivy", Link: "https://www.debian.org/security/2020/dsa-4766"},
},
}},
},
LibraryFixedIns: models.LibraryFixedIns{
models.LibraryFixedIn{
Key: "gemspec",
Name: "activesupport",
FixedIn: "6.0.3.1, 5.2.4.3",
Path: "Ruby",
},
},
},
},
LibraryScanners: models.LibraryScanners{
models.LibraryScanner{
Type: "gemspec",
LockfilePath: "Ruby",
Libs: []models.Library{
{
Name: "activesupport",
Version: "6.0.2.1",
FilePath: "var/lib/gems/2.5.0/specifications/activesupport-6.0.2.1.gemspec",
},
},
},
},
Packages: models.Packages{
"libgnutls30": models.Package{
Name: "libgnutls30",
Version: "3.6.7-4",
},
},
SrcPackages: models.SrcPackages{
"gnutls28": models.SrcPackage{
Name: "gnutls28",
Version: "3.6.7-4",
BinaryNames: []string{"libgnutls30"},
},
},
Optional: map[string]interface{}{
"TRIVY_IMAGE_NAME": "quay.io/fluentd_elasticsearch/fluentd",
"TRIVY_IMAGE_TAG": "v2.9.0",
},
}
func TestParseError(t *testing.T) {
cases := map[string]struct {
vulnJSON []byte
expected error
}{
"image hello-world": {
vulnJSON: helloWorldTrivy,
expected: xerrors.Errorf("scanned images or libraries are not supported by Trivy. see https://aquasecurity.github.io/trivy/dev/vulnerability/detection/os/, https://aquasecurity.github.io/trivy/dev/vulnerability/detection/language/"),
},
}
for testcase, v := range cases {
_, err := ParserV2{}.Parse(v.vulnJSON)
diff, equal := messagediff.PrettyDiff(
v.expected,
err,
messagediff.IgnoreStructField("frame"),
)
if !equal {
t.Errorf("test: %s, diff %s", testcase, diff)
}
}
}
var helloWorldTrivy = []byte(`
{
"SchemaVersion": 2,
"ArtifactName": "hello-world:latest",
"ArtifactType": "container_image",
"Metadata": {
"ImageID": "sha256:feb5d9fea6a5e9606aa995e879d862b825965ba48de054caab5ef356dc6b3412",
"DiffIDs": [
"sha256:e07ee1baac5fae6a26f30cabfe54a36d3402f96afda318fe0a96cec4ca393359"
],
"RepoTags": [
"hello-world:latest"
],
"RepoDigests": [
"hello-world@sha256:97a379f4f88575512824f3b352bc03cd75e239179eea0fecc38e597b2209f49a"
],
"ImageConfig": {
"architecture": "amd64",
"container": "8746661ca3c2f215da94e6d3f7dfdcafaff5ec0b21c9aff6af3dc379a82fbc72",
"created": "2021-09-23T23:47:57.442225064Z",
"docker_version": "20.10.7",
"history": [
{
"created": "2021-09-23T23:47:57Z",
"created_by": "/bin/sh -c #(nop) COPY file:50563a97010fd7ce1ceebd1fa4f4891ac3decdf428333fb2683696f4358af6c2 in / "
},
{
"created": "2021-09-23T23:47:57Z",
"created_by": "/bin/sh -c #(nop) CMD [\"/hello\"]",
"empty_layer": true
}
],
"os": "linux",
"rootfs": {
"type": "layers",
"diff_ids": [
"sha256:e07ee1baac5fae6a26f30cabfe54a36d3402f96afda318fe0a96cec4ca393359"
]
},
"config": {
"Cmd": [
"/hello"
],
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Image": "sha256:b9935d4e8431fb1a7f0989304ec86b3329a99a25f5efdc7f09f3f8c41434ca6d"
}
}
}
}`)

View File

@@ -0,0 +1,200 @@
package pkg
import (
"sort"
"time"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/os"
"github.com/aquasecurity/trivy/pkg/types"
"github.com/future-architect/vuls/models"
)
// Convert converts trivy Results to a Vuls ScanResult
func Convert(results types.Results) (result *models.ScanResult, err error) {
scanResult := &models.ScanResult{
JSONVersion: models.JSONVersion,
ScannedCves: models.VulnInfos{},
}
pkgs := models.Packages{}
srcPkgs := models.SrcPackages{}
vulnInfos := models.VulnInfos{}
uniqueLibraryScannerPaths := map[string]models.LibraryScanner{}
for _, trivyResult := range results {
for _, vuln := range trivyResult.Vulnerabilities {
if _, ok := vulnInfos[vuln.VulnerabilityID]; !ok {
vulnInfos[vuln.VulnerabilityID] = models.VulnInfo{
CveID: vuln.VulnerabilityID,
Confidences: models.Confidences{
{
Score: 100,
DetectionMethod: models.TrivyMatchStr,
},
},
AffectedPackages: models.PackageFixStatuses{},
CveContents: models.CveContents{},
LibraryFixedIns: models.LibraryFixedIns{},
// VulnType : "",
}
}
vulnInfo := vulnInfos[vuln.VulnerabilityID]
var notFixedYet bool
fixState := ""
if len(vuln.FixedVersion) == 0 {
notFixedYet = true
fixState = "Affected"
}
var references models.References
for _, reference := range vuln.References {
references = append(references, models.Reference{
Source: "trivy",
Link: reference,
})
}
sort.Slice(references, func(i, j int) bool {
return references[i].Link < references[j].Link
})
var published time.Time
if vuln.PublishedDate != nil {
published = *vuln.PublishedDate
}
var lastModified time.Time
if vuln.LastModifiedDate != nil {
lastModified = *vuln.LastModifiedDate
}
vulnInfo.CveContents = models.CveContents{
models.Trivy: []models.CveContent{{
Cvss3Severity: vuln.Severity,
References: references,
Title: vuln.Title,
Summary: vuln.Description,
Published: published,
LastModified: lastModified,
}},
}
// do only if image type is Vuln
if isTrivySupportedOS(trivyResult.Type) {
pkgs[vuln.PkgName] = models.Package{
Name: vuln.PkgName,
Version: vuln.InstalledVersion,
}
vulnInfo.AffectedPackages = append(vulnInfo.AffectedPackages, models.PackageFixStatus{
Name: vuln.PkgName,
NotFixedYet: notFixedYet,
FixState: fixState,
FixedIn: vuln.FixedVersion,
})
} else {
vulnInfo.LibraryFixedIns = append(vulnInfo.LibraryFixedIns, models.LibraryFixedIn{
Key: trivyResult.Type,
Name: vuln.PkgName,
Path: trivyResult.Target,
FixedIn: vuln.FixedVersion,
})
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
libScanner.Type = trivyResult.Type
libScanner.Libs = append(libScanner.Libs, models.Library{
Name: vuln.PkgName,
Version: vuln.InstalledVersion,
FilePath: vuln.PkgPath,
})
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
}
vulnInfos[vuln.VulnerabilityID] = vulnInfo
}
// The --list-all-pkgs flag of trivy outputs all installed packages, so collect them.
if trivyResult.Class == types.ClassOSPkg {
for _, p := range trivyResult.Packages {
pkgs[p.Name] = models.Package{
Name: p.Name,
Version: p.Version,
}
if p.Name != p.SrcName {
if v, ok := srcPkgs[p.SrcName]; !ok {
srcPkgs[p.SrcName] = models.SrcPackage{
Name: p.SrcName,
Version: p.SrcVersion,
BinaryNames: []string{p.Name},
}
} else {
v.AddBinaryName(p.Name)
srcPkgs[p.SrcName] = v
}
}
}
} else if trivyResult.Class == types.ClassLangPkg {
libScanner := uniqueLibraryScannerPaths[trivyResult.Target]
libScanner.Type = trivyResult.Type
for _, p := range trivyResult.Packages {
libScanner.Libs = append(libScanner.Libs, models.Library{
Name: p.Name,
Version: p.Version,
FilePath: p.FilePath,
})
}
uniqueLibraryScannerPaths[trivyResult.Target] = libScanner
}
}
// flatten and unique libraries
libraryScanners := make([]models.LibraryScanner, 0, len(uniqueLibraryScannerPaths))
for path, v := range uniqueLibraryScannerPaths {
uniqueLibrary := map[string]models.Library{}
for _, lib := range v.Libs {
uniqueLibrary[lib.Name+lib.Version] = lib
}
var libraries []models.Library
for _, library := range uniqueLibrary {
libraries = append(libraries, library)
}
sort.Slice(libraries, func(i, j int) bool {
return libraries[i].Name < libraries[j].Name
})
libscanner := models.LibraryScanner{
Type: v.Type,
LockfilePath: path,
Libs: libraries,
}
libraryScanners = append(libraryScanners, libscanner)
}
sort.Slice(libraryScanners, func(i, j int) bool {
return libraryScanners[i].LockfilePath < libraryScanners[j].LockfilePath
})
scanResult.ScannedCves = vulnInfos
scanResult.Packages = pkgs
scanResult.SrcPackages = srcPkgs
scanResult.LibraryScanners = libraryScanners
return scanResult, nil
}
func isTrivySupportedOS(family string) bool {
supportedFamilies := map[string]struct{}{
os.RedHat: {},
os.Debian: {},
os.Ubuntu: {},
os.CentOS: {},
os.Rocky: {},
os.Alma: {},
os.Fedora: {},
os.Amazon: {},
os.Oracle: {},
os.Windows: {},
os.OpenSUSE: {},
os.OpenSUSELeap: {},
os.OpenSUSETumbleweed: {},
os.SLES: {},
os.Photon: {},
os.Alpine: {},
}
_, ok := supportedFamilies[family]
return ok
}

4052
cti/cti.go Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,14 @@
package cwe
// CweTopTwentyfive2019 has CWE-ID in CWE Top 25
var CweTopTwentyfive2019 = map[string]string{
// CweTopTwentyfives has CWE-ID in CWE Top 25
var CweTopTwentyfives = map[string]map[string]string{
"2019": cweTopTwentyfive2019,
"2020": cweTopTwentyfive2020,
"2021": cweTopTwentyfive2021,
"2022": cweTopTwentyfive2022,
}
var cweTopTwentyfive2019 = map[string]string{
"119": "1",
"79": "2",
"20": "3",
@@ -29,5 +36,94 @@ var CweTopTwentyfive2019 = map[string]string{
"295": "25",
}
// CweTopTwentyfive2019URL has CWE Top25 links
var CweTopTwentyfive2019URL = "https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html"
var cweTopTwentyfive2020 = map[string]string{
"79": "1",
"787": "2",
"20": "3",
"125": "4",
"119": "5",
"89": "6",
"200": "7",
"416": "8",
"352": "9",
"78": "10",
"190": "11",
"22": "12",
"476": "13",
"287": "14",
"434": "16",
"732": "16",
"94": "17",
"522": "18",
"611": "19",
"798": "20",
"502": "21",
"269": "22",
"400": "23",
"306": "24",
"862": "25",
}
var cweTopTwentyfive2021 = map[string]string{
"787": "1",
"79": "2",
"125": "3",
"20": "4",
"78": "5",
"89": "6",
"416": "7",
"22": "8",
"352": "9",
"434": "10",
"306": "11",
"190": "12",
"502": "13",
"287": "14",
"476": "16",
"798": "16",
"119": "17",
"862": "18",
"276": "19",
"200": "20",
"522": "21",
"732": "22",
"611": "23",
"918": "24",
"77": "25",
}
var cweTopTwentyfive2022 = map[string]string{
"787": "1",
"79": "2",
"89": "3",
"20": "4",
"125": "5",
"78": "6",
"416": "7",
"22": "8",
"352": "9",
"434": "10",
"476": "11",
"502": "12",
"190": "13",
"287": "14",
"798": "16",
"862": "16",
"77": "17",
"306": "18",
"119": "19",
"276": "20",
"918": "21",
"362": "22",
"400": "23",
"611": "24",
"94": "25",
}
// CweTopTwentyfiveURLs has CWE Top25 links
var CweTopTwentyfiveURLs = map[string]string{
"2019": "https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html",
"2020": "https://cwe.mitre.org/top25/archive/2020/2020_cwe_top25.html",
"2021": "https://cwe.mitre.org/top25/archive/2021/2021_cwe_top25.html",
"2022": "https://cwe.mitre.org/top25/archive/2022/2022_cwe_top25.html",
}
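A brief lookup sketch for these year-keyed maps; the import path is assumed from the package name and repository layout:

package main

import (
	"fmt"

	"github.com/future-architect/vuls/cwe"
)

func main() {
	year, cweID := "2021", "787"
	if rank, ok := cwe.CweTopTwentyfives[year][cweID]; ok {
		fmt.Printf("CWE-%s is ranked #%s in the %s CWE Top 25 (%s)\n", cweID, rank, year, cwe.CweTopTwentyfiveURLs[year])
	}
}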

4802
cwe/en.go

File diff suppressed because it is too large Load Diff

2616
cwe/ja.go

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,12 @@
package cwe
// OwaspTopTen2017 has CWE-ID in OWSP Top 10
var OwaspTopTen2017 = map[string]string{
// OwaspTopTens has CWE-ID in OWASP Top 10
var OwaspTopTens = map[string]map[string]string{
"2017": owaspTopTen2017,
"2021": owaspTopTen2021,
}
var owaspTopTen2017 = map[string]string{
"77": "1",
"89": "1",
"564": "1",
@@ -36,30 +41,265 @@ var OwaspTopTen2017 = map[string]string{
"778": "10",
}
// OwaspTopTen2017GitHubURLEn has GitHub links
var OwaspTopTen2017GitHubURLEn = map[string]string{
"1": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa1-injection.md",
"2": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa2-broken-authentication.md",
"3": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa3-sensitive-data-disclosure.md",
"4": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa4-xxe.md",
"5": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa5-broken-access-control.md",
"6": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa6-security-misconfiguration.md",
"7": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa7-xss.md",
"8": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa8-insecure-deserialization.md",
"9": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa9-known-vulns.md<Paste>",
"10": "https://github.com/OWASP/Top10/blob/master/2017/en/0xaa-logging-detection-response.md",
var owaspTopTen2021 = map[string]string{
"22": "1",
"23": "1",
"35": "1",
"59": "1",
"200": "1",
"201": "1",
"219": "1",
"264": "1",
"275": "1",
"276": "1",
"284": "1",
"285": "1",
"352": "1",
"359": "1",
"377": "1",
"402": "1",
"425": "1",
"441": "1",
"497": "1",
"538": "1",
"540": "1",
"552": "1",
"566": "1",
"601": "1",
"639": "1",
"651": "1",
"668": "1",
"706": "1",
"862": "1",
"863": "1",
"913": "1",
"922": "1",
"1275": "1",
"261": "2",
"296": "2",
"310": "2",
"319": "2",
"321": "2",
"322": "2",
"323": "2",
"324": "2",
"325": "2",
"326": "2",
"327": "2",
"328": "2",
"329": "2",
"330": "2",
"331": "2",
"335": "2",
"336": "2",
"337": "2",
"338": "2",
"340": "2",
"347": "2",
"523": "2",
"720": "2",
"757": "2",
"759": "2",
"760": "2",
"780": "2",
"818": "2",
"916": "2",
"20": "3",
"74": "3",
"75": "3",
"77": "3",
"78": "3",
"79": "3",
"80": "3",
"83": "3",
"87": "3",
"88": "3",
"89": "3",
"90": "3",
"91": "3",
"93": "3",
"94": "3",
"95": "3",
"96": "3",
"97": "3",
"98": "3",
"99": "3",
"100": "3",
"113": "3",
"116": "3",
"138": "3",
"184": "3",
"470": "3",
"471": "3",
"564": "3",
"610": "3",
"643": "3",
"644": "3",
"652": "3",
"917": "3",
"73": "4",
"183": "4",
"209": "4",
"213": "4",
"235": "4",
"256": "4",
"257": "4",
"266": "4",
"269": "4",
"280": "4",
"311": "4",
"312": "4",
"313": "4",
"316": "4",
"419": "4",
"430": "4",
"434": "4",
"444": "4",
"451": "4",
"472": "4",
"501": "4",
"522": "4",
"525": "4",
"539": "4",
"579": "4",
"598": "4",
"602": "4",
"642": "4",
"646": "4",
"650": "4",
"653": "4",
"656": "4",
"657": "4",
"799": "4",
"807": "4",
"840": "4",
"841": "4",
"927": "4",
"1021": "4",
"1173": "4",
"2": "5",
"11": "5",
"13": "5",
"15": "5",
"16": "5",
"260": "5",
"315": "5",
"520": "5",
"526": "5",
"537": "5",
"541": "5",
"547": "5",
"611": "5",
"614": "5",
"756": "5",
"776": "5",
"942": "5",
"1004": "5",
"1032": "5",
"1174": "5",
"937": "6",
"1035": "6",
"1104": "6",
"255": "7",
"259": "7",
"287": "7",
"288": "7",
"290": "7",
"294": "7",
"295": "7",
"297": "7",
"300": "7",
"302": "7",
"304": "7",
"306": "7",
"307": "7",
"346": "7",
"384": "7",
"521": "7",
"613": "7",
"620": "7",
"640": "7",
"798": "7",
"940": "7",
"1216": "7",
"345": "8",
"353": "8",
"426": "8",
"494": "8",
"502": "8",
"565": "8",
"784": "8",
"829": "8",
"830": "8",
"915": "8",
"117": "9",
"223": "9",
"532": "9",
"778": "9",
"918": "10",
}
// OwaspTopTen2017GitHubURLJa has GitHub links
var OwaspTopTen2017GitHubURLJa = map[string]string{
"1": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa1-injection.md",
"2": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa2-broken-authentication.md",
"3": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa3-sensitive-data-disclosure.md",
"4": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa4-xxe.md",
"5": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa5-broken-access-control.md",
"6": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa6-security-misconfiguration.md",
"7": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa7-xss.md",
"8": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa8-insecure-deserialization.md",
"9": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa9-known-vulns.md<Paste>",
"10": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xaa-logging-detection-response.md",
// OwaspTopTenURLsEn has GitHub links
var OwaspTopTenURLsEn = map[string]map[string]string{
"2017": {
"1": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa1-injection.md",
"2": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa2-broken-authentication.md",
"3": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa3-sensitive-data-disclosure.md",
"4": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa4-xxe.md",
"5": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa5-broken-access-control.md",
"6": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa6-security-misconfiguration.md",
"7": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa7-xss.md",
"8": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa8-insecure-deserialization.md",
"9": "https://github.com/OWASP/Top10/blob/master/2017/en/0xa9-known-vulns.md",
"10": "https://github.com/OWASP/Top10/blob/master/2017/en/0xaa-logging-detection-response.md",
},
"2021": {
"1": "https://github.com/OWASP/Top10/blob/master/2021/docs/A01_2021-Broken_Access_Control.md",
"2": "https://github.com/OWASP/Top10/blob/master/2021/docs/A02_2021-Cryptographic_Failures.md",
"3": "https://github.com/OWASP/Top10/blob/master/2021/docs/A03_2021-Injection.md",
"4": "https://github.com/OWASP/Top10/blob/master/2021/docs/A04_2021-Insecure_Design.md",
"5": "https://github.com/OWASP/Top10/blob/master/2021/docs/A05_2021-Security_Misconfiguration.md",
"6": "https://github.com/OWASP/Top10/blob/master/2021/docs/A06_2021-Vulnerable_and_Outdated_Components.md",
"7": "https://github.com/OWASP/Top10/blob/master/2021/docs/A07_2021-Identification_and_Authentication_Failures.md",
"8": "https://github.com/OWASP/Top10/blob/master/2021/docs/A08_2021-Software_and_Data_Integrity_Failures.md",
"9": "https://github.com/OWASP/Top10/blob/master/2021/docs/A09_2021-Security_Logging_and_Monitoring_Failures.md",
"10": "https://github.com/OWASP/Top10/blob/master/2021/docs/A10_2021-Server-Side_Request_Forgery_(SSRF).md",
},
}
// OwaspTopTenURLsJa has GitHub links
var OwaspTopTenURLsJa = map[string]map[string]string{
"2017": {
"1": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa1-injection.md",
"2": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa2-broken-authentication.md",
"3": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa3-sensitive-data-disclosure.md",
"4": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa4-xxe.md",
"5": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa5-broken-access-control.md",
"6": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa6-security-misconfiguration.md",
"7": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa7-xss.md",
"8": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa8-insecure-deserialization.md",
"9": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xa9-known-vulns.md",
"10": "https://github.com/OWASP/Top10/blob/master/2017/ja/0xaa-logging-detection-response.md",
},
"2021": {
"1": "https://github.com/OWASP/Top10/blob/master/2021/docs/A01_2021-Broken_Access_Control.ja.md",
"2": "https://github.com/OWASP/Top10/blob/master/2021/docs/A02_2021-Cryptographic_Failures.ja.md",
"3": "https://github.com/OWASP/Top10/blob/master/2021/docs/A03_2021-Injection.ja.md",
"4": "https://github.com/OWASP/Top10/blob/master/2021/docs/A04_2021-Insecure_Design.ja.md",
"5": "https://github.com/OWASP/Top10/blob/master/2021/docs/A05_2021-Security_Misconfiguration.ja.md",
"6": "https://github.com/OWASP/Top10/blob/master/2021/docs/A06_2021-Vulnerable_and_Outdated_Components.ja.md",
"7": "https://github.com/OWASP/Top10/blob/master/2021/docs/A07_2021-Identification_and_Authentication_Failures.ja.md",
"8": "https://github.com/OWASP/Top10/blob/master/2021/docs/A08_2021-Software_and_Data_Integrity_Failures.ja.md",
"9": "https://github.com/OWASP/Top10/blob/master/2021/docs/A09_2021-Security_Logging_and_Monitoring_Failures.ja.md",
"10": "https://github.com/OWASP/Top10/blob/master/2021/docs/A10_2021-Server-Side_Request_Forgery_(SSRF).ja.md",
},
}


@@ -1,7 +1,41 @@
package cwe
// SansTopTwentyfive has CWE-ID in CWE/SANS Top 25
var SansTopTwentyfive = map[string]string{
// SansTopTwentyfives has CWE-ID in CWE/SANS Top 25
var SansTopTwentyfives = map[string]map[string]string{
"2010": sansTopTwentyfive2010,
"2011": sansTopTwentyfive2011,
"latest": sansTopTwentyfiveLatest,
}
var sansTopTwentyfive2010 = map[string]string{
"79": "1",
"89": "2",
"120": "3",
"352": "4",
"285": "5",
"807": "6",
"22": "7",
"434": "8",
"78": "9",
"311": "10",
"798": "11",
"805": "12",
"98": "13",
"129": "14",
"754": "15",
"209": "16",
"190": "17",
"131": "18",
"306": "19",
"494": "20",
"732": "21",
"770": "22",
"601": "23",
"327": "24",
"362": "25",
}
var sansTopTwentyfive2011 = map[string]string{
"89": "1",
"78": "2",
"120": "3",
@@ -29,5 +63,37 @@ var SansTopTwentyfive = map[string]string{
"759": "25",
}
// SansTopTwentyfiveURL is a URL of sans 25
var SansTopTwentyfiveURL = "https://www.sans.org/top25-software-errors/"
var sansTopTwentyfiveLatest = map[string]string{
"119": "1",
"79": "2",
"20": "3",
"200": "4",
"125": "5",
"89": "6",
"416": "7",
"190": "8",
"352": "9",
"22": "10",
"78": "11",
"787": "12",
"287": "13",
"476": "14",
"732": "15",
"434": "16",
"611": "17",
"94": "18",
"798": "19",
"400": "20",
"772": "21",
"426": "22",
"502": "23",
"269": "24",
"295": "25",
}
// SansTopTwentyfiveURLs has CWE/SANS Top25 links
var SansTopTwentyfiveURLs = map[string]string{
"2010": "https://cwe.mitre.org/top25/archive/2010/2010_cwe_sans_top25.html",
"2011": "https://cwe.mitre.org/top25/archive/2011/2011_cwe_sans_top25.html",
"latest": "https://www.sans.org/top25-software-errors/",
}
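
All of these tables are keyed by edition ("2010", "2011", "2017", "2021", or "latest") and then either by CWE-ID (the rank tables) or by rank (the URL tables), so a lookup is a two-step map access. Below is a minimal reading sketch; the CWE-ID and the printed format are assumptions for illustration only and are not part of this change.

package main

import (
	"fmt"

	"github.com/future-architect/vuls/cwe"
)

func main() {
	// Hypothetical CWE-ID, used only to illustrate the lookups.
	id := "79"
	for year, ranks := range cwe.SansTopTwentyfives {
		if rank, ok := ranks[id]; ok {
			// e.g. rank "1" in the 2010 list and "2" in the latest list per the maps above.
			fmt.Printf("CWE/SANS Top 25 %s: rank %s (%s)\n", year, rank, cwe.SansTopTwentyfiveURLs[year])
		}
	}
	// The URL tables are keyed by edition and then by rank.
	fmt.Println(cwe.OwaspTopTenURLsEn["2021"]["3"]) // A03:2021 Injection
}
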

222
detector/cti.go Normal file

@@ -0,0 +1,222 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
ctidb "github.com/vulsio/go-cti/db"
ctilog "github.com/vulsio/go-cti/utils"
)
// goCTIDBClient is a DB Driver
type goCTIDBClient struct {
driver ctidb.DB
baseURL string
}
// closeDB closes a DB connection
func (client goCTIDBClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}
func newGoCTIDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCTIDBClient, error) {
if err := ctilog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-cti logger. err: %w", err)
}
db, err := newCTIDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newCTIDB. err: %w", err)
}
return &goCTIDBClient{driver: db, baseURL: cnf.GetURL()}, nil
}
// FillWithCTI :
func FillWithCTI(r *models.ScanResult, cnf config.CtiConf, logOpts logging.LogOpts) error {
client, err := newGoCTIDBClient(&cnf, logOpts)
if err != nil {
return err
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
nCti := 0
if client.driver == nil {
var cveIDs []string
for cveID := range r.ScannedCves {
cveIDs = append(cveIDs, cveID)
}
prefix, err := util.URLPathJoin(client.baseURL, "cves")
if err != nil {
return err
}
responses, err := getCTIsViaHTTP(cveIDs, prefix)
if err != nil {
return err
}
for _, res := range responses {
var techniqueIDs []string
if err := json.Unmarshal([]byte(res.json), &techniqueIDs); err != nil {
return err
}
v, ok := r.ScannedCves[res.request.cveID]
if ok {
v.Ctis = techniqueIDs
nCti++
}
r.ScannedCves[res.request.cveID] = v
}
} else {
for cveID, vuln := range r.ScannedCves {
if cveID == "" {
continue
}
techniqueIDs, err := client.driver.GetTechniqueIDsByCveID(cveID)
if err != nil {
return xerrors.Errorf("Failed to get CTIs by CVE-ID. err: %w", err)
}
if len(techniqueIDs) == 0 {
continue
}
vuln.Ctis = techniqueIDs
nCti++
r.ScannedCves[cveID] = vuln
}
}
logging.Log.Infof("%s: Cyber Threat Intelligences are detected for %d CVEs", r.FormatServerName(), nCti)
return nil
}
type ctiResponse struct {
request ctiRequest
json string
}
func getCTIsViaHTTP(cveIDs []string, urlPrefix string) (responses []ctiResponse, err error) {
nReq := len(cveIDs)
reqChan := make(chan ctiRequest, nReq)
resChan := make(chan ctiResponse, nReq)
errChan := make(chan error, nReq)
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- ctiRequest{
cveID: cveID,
}
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for i := 0; i < nReq; i++ {
tasks <- func() {
req := <-reqChan
url, err := util.URLPathJoin(
urlPrefix,
req.cveID,
)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGetCTI(url, req, resChan, errChan)
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for i := 0; i < nReq; i++ {
select {
case res := <-resChan:
responses = append(responses, res)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching CTI")
}
}
if len(errs) != 0 {
return nil, xerrors.Errorf("Failed to fetch CTI. err: %w", errs)
}
return
}
type ctiRequest struct {
cveID string
}
func httpGetCTI(url string, req ctiRequest, resChan chan<- ctiResponse, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
count, retryMax := 0, 3
f := func() (err error) {
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
count++
if count == retryMax {
return nil
}
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
if err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify); err != nil {
errChan <- xerrors.Errorf("HTTP Error %w", err)
return
}
if count == retryMax {
errChan <- xerrors.New("Retry count exceeded")
return
}
resChan <- ctiResponse{
request: req,
json: body,
}
}
func newCTIDB(cnf config.VulnDictInterface) (ctidb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, err := ctidb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), ctidb.Option{})
if err != nil {
if xerrors.Is(err, ctidb.ErrDBLocked) {
return nil, xerrors.Errorf("Failed to init cti DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init cti DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
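
newCTIDB returns a nil driver when the dictionary is fetched via HTTP, and FillWithCTI branches on that: a nil driver means per-CVE requests to a go-cti server, otherwise direct local DB queries. A minimal caller looks like the sketch below; it wires the same config values that detector.Detect uses later in this change, and it assumes config.Conf and logging.Log have already been initialized by the report command.

package main

import (
	"fmt"

	"github.com/future-architect/vuls/config"
	"github.com/future-architect/vuls/detector"
	"github.com/future-architect/vuls/logging"
	"github.com/future-architect/vuls/models"
)

func main() {
	// Sketch only: a ScanResult would normally be loaded from the results JSON.
	r := models.ScanResult{ScannedCves: models.VulnInfos{}}
	if err := detector.FillWithCTI(&r, config.Conf.Cti, config.Conf.LogOpts); err != nil {
		logging.Log.Errorf("Failed to fill with CTI. err: %+v", err)
		return
	}
	for cveID, vinfo := range r.ScannedCves {
		fmt.Printf("%s: %d ATT&CK technique IDs\n", cveID, len(vinfo.Ctis))
	}
}
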

224
detector/cve_client.go Normal file

@@ -0,0 +1,224 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/util"
cvedb "github.com/vulsio/go-cve-dictionary/db"
cvelog "github.com/vulsio/go-cve-dictionary/log"
cvemodels "github.com/vulsio/go-cve-dictionary/models"
)
type goCveDictClient struct {
driver cvedb.DB
baseURL string
}
func newGoCveDictClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goCveDictClient, error) {
if err := cvelog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-cve-dictionary logger. err: %w", err)
}
driver, err := newCveDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newCveDB. err: %w", err)
}
return &goCveDictClient{driver: driver, baseURL: cnf.GetURL()}, nil
}
func (client goCveDictClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}
type response struct {
Key string
CveDetail cvemodels.CveDetail
}
func (client goCveDictClient) fetchCveDetails(cveIDs []string) (cveDetails []cvemodels.CveDetail, err error) {
if client.driver == nil {
reqChan := make(chan string, len(cveIDs))
resChan := make(chan response, len(cveIDs))
errChan := make(chan error, len(cveIDs))
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- cveID
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for range cveIDs {
tasks <- func() {
select {
case cveID := <-reqChan:
url, err := util.URLPathJoin(client.baseURL, "cves", cveID)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGet(cveID, url, resChan, errChan)
}
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for range cveIDs {
select {
case res := <-resChan:
cveDetails = append(cveDetails, res.CveDetail)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching CVE")
}
}
if len(errs) != 0 {
return nil,
xerrors.Errorf("Failed to fetch CVE. err: %w", errs)
}
} else {
m, err := client.driver.GetMulti(cveIDs)
if err != nil {
return nil, xerrors.Errorf("Failed to GetMulti. err: %w", err)
}
for _, v := range m {
cveDetails = append(cveDetails, v)
}
}
return cveDetails, nil
}
func httpGet(key, url string, resChan chan<- response, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
f := func() (err error) {
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("HTTP GET Error, url: %s, resp: %v, err: %+v",
url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error: %w", err)
return
}
cveDetail := cvemodels.CveDetail{}
if err := json.Unmarshal([]byte(body), &cveDetail); err != nil {
errChan <- xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
return
}
resChan <- response{
key,
cveDetail,
}
}
func (client goCveDictClient) detectCveByCpeURI(cpeURI string, useJVN bool) (cves []cvemodels.CveDetail, err error) {
if client.driver == nil {
url, err := util.URLPathJoin(client.baseURL, "cpes")
if err != nil {
return nil, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
query := map[string]string{"name": cpeURI}
logging.Log.Debugf("HTTP Request to %s, query: %#v", url, query)
if cves, err = httpPost(url, query); err != nil {
return nil, xerrors.Errorf("Failed to post HTTP Request. err: %w", err)
}
} else {
if cves, err = client.driver.GetByCpeURI(cpeURI); err != nil {
return nil, xerrors.Errorf("Failed to get CVEs by CPEURI. err: %w", err)
}
}
if useJVN {
return cves, nil
}
nvdCves := []cvemodels.CveDetail{}
for _, cve := range cves {
if !cve.HasNvd() {
continue
}
cve.Jvns = []cvemodels.Jvn{}
nvdCves = append(nvdCves, cve)
}
return nvdCves, nil
}
func httpPost(url string, query map[string]string) ([]cvemodels.CveDetail, error) {
var body string
var errs []error
var resp *http.Response
f := func() (err error) {
req := gorequest.New().Timeout(10 * time.Second).Post(url)
for key := range query {
req = req.Send(fmt.Sprintf("%s=%s", key, query[key])).Type("json")
}
resp, body, errs = req.End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
return xerrors.Errorf("HTTP POST error. url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP POST. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
return nil, xerrors.Errorf("HTTP Error: %w", err)
}
cveDetails := []cvemodels.CveDetail{}
if err := json.Unmarshal([]byte(body), &cveDetails); err != nil {
return nil,
xerrors.Errorf("Failed to Unmarshal. body: %s, err: %w", body, err)
}
return cveDetails, nil
}
func newCveDB(cnf config.VulnDictInterface) (cvedb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, err := cvedb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), cvedb.Option{})
if err != nil {
if xerrors.Is(err, cvedb.ErrDBLocked) {
return nil, xerrors.Errorf("Failed to init CVE DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init CVE DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
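
The HTTP path of fetchCveDetails, and the equivalents in cti.go, exploitdb.go, and kevuln.go below, all share one fan-out shape: buffered request/response/error channels, a worker pool from util.GenWorkers, and a select loop guarded by a two-minute timeout. The following is a stripped-down sketch of that pattern, written as if it sat next to the code above; doWork is a hypothetical stand-in, and the time, util, and xerrors imports are the same ones this file already uses.

func fanOut(items []string, doWork func(string) (string, error)) ([]string, error) {
	resChan := make(chan string, len(items))
	errChan := make(chan error, len(items))
	defer close(resChan)
	defer close(errChan)

	tasks := util.GenWorkers(10) // the detectors above use concurrency = 10
	for _, item := range items {
		item := item // capture the loop variable for the closure
		tasks <- func() {
			if res, err := doWork(item); err != nil {
				errChan <- err
			} else {
				resChan <- res
			}
		}
	}

	timeout := time.After(2 * 60 * time.Second)
	var results []string
	var errs []error
	for range items {
		select {
		case res := <-resChan:
			results = append(results, res)
		case err := <-errChan:
			errs = append(errs, err)
		case <-timeout:
			return nil, xerrors.New("Timeout fetching results")
		}
	}
	if len(errs) != 0 {
		return nil, xerrors.Errorf("Failed to fetch. errs: %v", errs)
	}
	return results, nil
}
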

629
detector/detector.go Normal file

@@ -0,0 +1,629 @@
//go:build !scanner
// +build !scanner
package detector
import (
"os"
"strings"
"time"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/contrib/owasp-dependency-check/parser"
"github.com/future-architect/vuls/cwe"
"github.com/future-architect/vuls/gost"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/oval"
"github.com/future-architect/vuls/reporter"
"github.com/future-architect/vuls/util"
cvemodels "github.com/vulsio/go-cve-dictionary/models"
)
// Cpe :
type Cpe struct {
CpeURI string
UseJVN bool
}
// Detect vulns and fill CVE detailed information
func Detect(rs []models.ScanResult, dir string) ([]models.ScanResult, error) {
// Use the same reportedAt for all rs
reportedAt := time.Now()
for i, r := range rs {
if !config.Conf.RefreshCve && !needToRefreshCve(r) {
logging.Log.Info("No need to refresh")
continue
}
if !reuseScannedCves(&r) {
r.ScannedCves = models.VulnInfos{}
}
if err := DetectLibsCves(&r, config.Conf.TrivyCacheDBDir, config.Conf.NoProgress); err != nil {
return nil, xerrors.Errorf("Failed to fill with Library dependency: %w", err)
}
if err := DetectPkgCves(&r, config.Conf.OvalDict, config.Conf.Gost, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to detect Pkg CVE: %w", err)
}
cpeURIs, owaspDCXMLPath := []string{}, ""
cpes := []Cpe{}
if len(r.Container.ContainerID) == 0 {
cpeURIs = config.Conf.Servers[r.ServerName].CpeNames
owaspDCXMLPath = config.Conf.Servers[r.ServerName].OwaspDCXMLPath
} else {
if s, ok := config.Conf.Servers[r.ServerName]; ok {
if con, ok := s.Containers[r.Container.Name]; ok {
cpeURIs = con.Cpes
owaspDCXMLPath = con.OwaspDCXMLPath
}
}
}
if owaspDCXMLPath != "" {
cpes, err := parser.Parse(owaspDCXMLPath)
if err != nil {
return nil, xerrors.Errorf("Failed to read OWASP Dependency Check XML on %s, `%s`, err: %w",
r.ServerInfo(), owaspDCXMLPath, err)
}
cpeURIs = append(cpeURIs, cpes...)
}
for _, uri := range cpeURIs {
cpes = append(cpes, Cpe{
CpeURI: uri,
UseJVN: true,
})
}
if err := DetectCpeURIsCves(&r, cpes, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to detect CVE of `%s`: %w", cpeURIs, err)
}
repos := config.Conf.Servers[r.ServerName].GitHubRepos
if err := DetectGitHubCves(&r, repos); err != nil {
return nil, xerrors.Errorf("Failed to detect GitHub Cves: %w", err)
}
if err := DetectWordPressCves(&r, config.Conf.WpScan); err != nil {
return nil, xerrors.Errorf("Failed to detect WordPress Cves: %w", err)
}
if err := gost.FillCVEsWithRedHat(&r, config.Conf.Gost, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to fill with gost: %w", err)
}
if err := FillCvesWithNvdJvn(&r, config.Conf.CveDict, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to fill with CVE: %w", err)
}
nExploitCve, err := FillWithExploit(&r, config.Conf.Exploit, config.Conf.LogOpts)
if err != nil {
return nil, xerrors.Errorf("Failed to fill with exploit: %w", err)
}
logging.Log.Infof("%s: %d PoC are detected", r.FormatServerName(), nExploitCve)
nMetasploitCve, err := FillWithMetasploit(&r, config.Conf.Metasploit, config.Conf.LogOpts)
if err != nil {
return nil, xerrors.Errorf("Failed to fill with metasploit: %w", err)
}
logging.Log.Infof("%s: %d exploits are detected", r.FormatServerName(), nMetasploitCve)
if err := FillWithKEVuln(&r, config.Conf.KEVuln, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to fill with Known Exploited Vulnerabilities: %w", err)
}
if err := FillWithCTI(&r, config.Conf.Cti, config.Conf.LogOpts); err != nil {
return nil, xerrors.Errorf("Failed to fill with Cyber Threat Intelligences: %w", err)
}
FillCweDict(&r)
r.ReportedBy, _ = os.Hostname()
r.Lang = config.Conf.Lang
r.ReportedAt = reportedAt
r.ReportedVersion = config.Version
r.ReportedRevision = config.Revision
r.Config.Report = config.Conf
r.Config.Report.Servers = map[string]config.ServerInfo{
r.ServerName: config.Conf.Servers[r.ServerName],
}
rs[i] = r
}
// Overwrite the json file every time to clear the fields specified in config.IgnoredJSONKeys
for _, r := range rs {
if s, ok := config.Conf.Servers[r.ServerName]; ok {
r = r.ClearFields(s.IgnoredJSONKeys)
}
//TODO don't call here
if err := reporter.OverwriteJSONFile(dir, r); err != nil {
return nil, xerrors.Errorf("Failed to write JSON: %w", err)
}
}
if config.Conf.DiffPlus || config.Conf.DiffMinus {
prevs, err := loadPrevious(rs, config.Conf.ResultsDir)
if err != nil {
return nil, xerrors.Errorf("Failed to load previous results. err: %w", err)
}
rs = diff(rs, prevs, config.Conf.DiffPlus, config.Conf.DiffMinus)
}
for i, r := range rs {
nFiltered := 0
logging.Log.Infof("%s: total %d CVEs detected", r.FormatServerName(), len(r.ScannedCves))
if 0 < config.Conf.CvssScoreOver {
r.ScannedCves, nFiltered = r.ScannedCves.FilterByCvssOver(config.Conf.CvssScoreOver)
logging.Log.Infof("%s: %d CVEs filtered by --cvss-over=%g", r.FormatServerName(), nFiltered, config.Conf.CvssScoreOver)
}
if config.Conf.IgnoreUnfixed {
r.ScannedCves, nFiltered = r.ScannedCves.FilterUnfixed(config.Conf.IgnoreUnfixed)
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unfixed", r.FormatServerName(), nFiltered)
}
if 0 < config.Conf.ConfidenceScoreOver {
r.ScannedCves, nFiltered = r.ScannedCves.FilterByConfidenceOver(config.Conf.ConfidenceScoreOver)
logging.Log.Infof("%s: %d CVEs filtered by --confidence-over=%d", r.FormatServerName(), nFiltered, config.Conf.ConfidenceScoreOver)
}
// IgnoreCves
ignoreCves := []string{}
if r.Container.Name == "" {
ignoreCves = config.Conf.Servers[r.ServerName].IgnoreCves
} else if con, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
ignoreCves = con.IgnoreCves
}
if 0 < len(ignoreCves) {
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnoreCves(ignoreCves)
logging.Log.Infof("%s: %d CVEs filtered by ignoreCves=%s", r.FormatServerName(), nFiltered, ignoreCves)
}
// ignorePkgs
ignorePkgsRegexps := []string{}
if r.Container.Name == "" {
ignorePkgsRegexps = config.Conf.Servers[r.ServerName].IgnorePkgsRegexp
} else if s, ok := config.Conf.Servers[r.ServerName].Containers[r.Container.Name]; ok {
ignorePkgsRegexps = s.IgnorePkgsRegexp
}
if 0 < len(ignorePkgsRegexps) {
r.ScannedCves, nFiltered = r.ScannedCves.FilterIgnorePkgs(ignorePkgsRegexps)
logging.Log.Infof("%s: %d CVEs filtered by ignorePkgsRegexp=%s", r.FormatServerName(), nFiltered, ignorePkgsRegexps)
}
// IgnoreUnscored
if config.Conf.IgnoreUnscoredCves {
r.ScannedCves, nFiltered = r.ScannedCves.FindScoredVulns()
logging.Log.Infof("%s: %d CVEs filtered by --ignore-unscored-cves", r.FormatServerName(), nFiltered)
}
r.FilterInactiveWordPressLibs(config.Conf.WpScan.DetectInactive)
rs[i] = r
}
return rs, nil
}
// DetectPkgCves detects OS pkg cves
// pass 2 configs
func DetectPkgCves(r *models.ScanResult, ovalCnf config.GovalDictConf, gostCnf config.GostConf, logOpts logging.LogOpts) error {
// Pkg Scan
if isPkgCvesDetactable(r) {
// OVAL and gost (Debian Security Tracker) do not support packages for Raspbian, so skip them.
if r.Family == constant.Raspbian {
r = r.RemoveRaspbianPackFromResult()
}
// OVAL
if err := detectPkgsCvesWithOval(ovalCnf, r, logOpts); err != nil {
return xerrors.Errorf("Failed to detect CVE with OVAL: %w", err)
}
// gost
if err := detectPkgsCvesWithGost(gostCnf, r, logOpts); err != nil {
return xerrors.Errorf("Failed to detect CVE with gost: %w", err)
}
}
for i, v := range r.ScannedCves {
for j, p := range v.AffectedPackages {
if p.NotFixedYet && p.FixState == "" {
p.FixState = "Not fixed yet"
r.ScannedCves[i].AffectedPackages[j] = p
}
}
}
// To keep backward compatibility:
// newer versions use ListenPortStats, but older versions of Vuls set ListenPorts.
// Copy ListenPorts into ListenPortStats so that newer Vuls can report old results.
for i, pkg := range r.Packages {
for j, proc := range pkg.AffectedProcs {
for _, ipPort := range proc.ListenPorts {
ps, err := models.NewPortStat(ipPort)
if err != nil {
logging.Log.Warnf("Failed to parse ip:port: %s, err:%+v", ipPort, err)
continue
}
r.Packages[i].AffectedProcs[j].ListenPortStats = append(
r.Packages[i].AffectedProcs[j].ListenPortStats, *ps)
}
}
}
return nil
}
// isPkgCvesDetactable checks whether CVEs are detectable with gost and OVAL from the result
func isPkgCvesDetactable(r *models.ScanResult) bool {
switch r.Family {
case constant.FreeBSD, constant.ServerTypePseudo:
logging.Log.Infof("%s type. Skip OVAL and gost detection", r.Family)
return false
case constant.Windows:
return true
default:
if r.ScannedVia == "trivy" {
logging.Log.Infof("r.ScannedVia is trivy. Skip OVAL and gost detection")
return false
}
if r.Release == "" {
logging.Log.Infof("r.Release is empty. Skip OVAL and gost detection")
return false
}
if len(r.Packages)+len(r.SrcPackages) == 0 {
logging.Log.Infof("Number of packages is 0. Skip OVAL and gost detection")
return false
}
return true
}
}
// DetectGitHubCves fetches CVEs from GitHub Security Alerts
func DetectGitHubCves(r *models.ScanResult, githubConfs map[string]config.GitHubConf) error {
if len(githubConfs) == 0 {
return nil
}
r.GitHubManifests = models.DependencyGraphManifests{}
for ownerRepo, setting := range githubConfs {
ss := strings.Split(ownerRepo, "/")
if len(ss) != 2 {
return xerrors.Errorf("Failed to parse GitHub owner/repo: %s", ownerRepo)
}
owner, repo := ss[0], ss[1]
n, err := DetectGitHubSecurityAlerts(r, owner, repo, setting.Token, setting.IgnoreGitHubDismissed)
if err != nil {
return xerrors.Errorf("Failed to access GitHub Security Alerts: %w", err)
}
logging.Log.Infof("%s: %d CVEs detected with GHSA %s/%s",
r.FormatServerName(), n, owner, repo)
if err = DetectGitHubDependencyGraph(r, owner, repo, setting.Token); err != nil {
return xerrors.Errorf("Failed to access GitHub Dependency graph: %w", err)
}
}
return nil
}
// DetectWordPressCves detects CVEs of WordPress
func DetectWordPressCves(r *models.ScanResult, wpCnf config.WpScanConf) error {
if len(r.WordPressPackages) == 0 {
return nil
}
logging.Log.Infof("%s: Detect WordPress CVE. Number of pkgs: %d ", r.ServerInfo(), len(r.WordPressPackages))
n, err := detectWordPressCves(r, wpCnf)
if err != nil {
return xerrors.Errorf("Failed to detect WordPress CVE: %w", err)
}
logging.Log.Infof("%s: found %d WordPress CVEs", r.FormatServerName(), n)
return nil
}
// FillCvesWithNvdJvn fills CVE detail with NVD, JVN
func FillCvesWithNvdJvn(r *models.ScanResult, cnf config.GoCveDictConf, logOpts logging.LogOpts) (err error) {
cveIDs := []string{}
for _, v := range r.ScannedCves {
cveIDs = append(cveIDs, v.CveID)
}
client, err := newGoCveDictClient(&cnf, logOpts)
if err != nil {
return xerrors.Errorf("Failed to newGoCveDictClient. err: %w", err)
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
ds, err := client.fetchCveDetails(cveIDs)
if err != nil {
return xerrors.Errorf("Failed to fetchCveDetails. err: %w", err)
}
for _, d := range ds {
nvds, exploits, mitigations := models.ConvertNvdToModel(d.CveID, d.Nvds)
jvns := models.ConvertJvnToModel(d.CveID, d.Jvns)
alerts := fillCertAlerts(&d)
for cveID, vinfo := range r.ScannedCves {
if vinfo.CveID == d.CveID {
if vinfo.CveContents == nil {
vinfo.CveContents = models.CveContents{}
}
for _, con := range nvds {
if !con.Empty() {
vinfo.CveContents[con.Type] = []models.CveContent{con}
}
}
for _, con := range jvns {
if !con.Empty() {
found := false
for _, cveCont := range vinfo.CveContents[con.Type] {
if con.SourceLink == cveCont.SourceLink {
found = true
break
}
}
if !found {
vinfo.CveContents[con.Type] = append(vinfo.CveContents[con.Type], con)
}
}
}
vinfo.AlertDict = alerts
vinfo.Exploits = append(vinfo.Exploits, exploits...)
vinfo.Mitigations = append(vinfo.Mitigations, mitigations...)
r.ScannedCves[cveID] = vinfo
break
}
}
}
return nil
}
func fillCertAlerts(cvedetail *cvemodels.CveDetail) (dict models.AlertDict) {
for _, nvd := range cvedetail.Nvds {
for _, cert := range nvd.Certs {
dict.USCERT = append(dict.USCERT, models.Alert{
URL: cert.Link,
Title: cert.Title,
Team: "uscert",
})
}
}
for _, jvn := range cvedetail.Jvns {
for _, cert := range jvn.Certs {
dict.JPCERT = append(dict.JPCERT, models.Alert{
URL: cert.Link,
Title: cert.Title,
Team: "jpcert",
})
}
}
return dict
}
// detectPkgsCvesWithOval fetches OVAL database
func detectPkgsCvesWithOval(cnf config.GovalDictConf, r *models.ScanResult, logOpts logging.LogOpts) error {
client, err := oval.NewOVALClient(r.Family, cnf, logOpts)
if err != nil {
return err
}
defer func() {
if err := client.CloseDB(); err != nil {
logging.Log.Errorf("Failed to close the OVAL DB. err: %+v", err)
}
}()
switch r.Family {
case constant.Debian, constant.Raspbian, constant.Ubuntu:
logging.Log.Infof("Skip OVAL and Scan with gost alone.")
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), 0)
return nil
case constant.Windows, constant.FreeBSD, constant.ServerTypePseudo:
return nil
default:
logging.Log.Debugf("Check if oval fetched: %s %s", r.Family, r.Release)
ok, err := client.CheckIfOvalFetched(r.Family, r.Release)
if err != nil {
return err
}
if !ok {
return xerrors.Errorf("OVAL entries of %s %s are not found. Fetch OVAL before reporting. For details, see `https://github.com/vulsio/goval-dictionary#usage`", r.Family, r.Release)
}
}
logging.Log.Debugf("Check if oval fresh: %s %s", r.Family, r.Release)
_, err = client.CheckIfOvalFresh(r.Family, r.Release)
if err != nil {
return err
}
logging.Log.Debugf("Fill with oval: %s %s", r.Family, r.Release)
nCVEs, err := client.FillWithOval(r)
if err != nil {
return err
}
logging.Log.Infof("%s: %d CVEs are detected with OVAL", r.FormatServerName(), nCVEs)
return nil
}
func detectPkgsCvesWithGost(cnf config.GostConf, r *models.ScanResult, logOpts logging.LogOpts) error {
client, err := gost.NewGostClient(cnf, r.Family, logOpts)
if err != nil {
return xerrors.Errorf("Failed to new a gost client: %w", err)
}
defer func() {
if err := client.CloseDB(); err != nil {
logging.Log.Errorf("Failed to close the gost DB. err: %+v", err)
}
}()
nCVEs, err := client.DetectCVEs(r, true)
if err != nil {
switch r.Family {
case constant.Debian, constant.Raspbian, constant.Ubuntu, constant.Windows:
return xerrors.Errorf("Failed to detect CVEs with gost: %w", err)
default:
return xerrors.Errorf("Failed to detect unfixed CVEs with gost: %w", err)
}
}
switch r.Family {
case constant.Debian, constant.Raspbian, constant.Ubuntu, constant.Windows:
logging.Log.Infof("%s: %d CVEs are detected with gost", r.FormatServerName(), nCVEs)
default:
logging.Log.Infof("%s: %d unfixed CVEs are detected with gost", r.FormatServerName(), nCVEs)
}
return nil
}
// DetectCpeURIsCves detects CVEs of given CPE-URIs
func DetectCpeURIsCves(r *models.ScanResult, cpes []Cpe, cnf config.GoCveDictConf, logOpts logging.LogOpts) error {
client, err := newGoCveDictClient(&cnf, logOpts)
if err != nil {
return xerrors.Errorf("Failed to newGoCveDictClient. err: %w", err)
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
nCVEs := 0
for _, cpe := range cpes {
details, err := client.detectCveByCpeURI(cpe.CpeURI, cpe.UseJVN)
if err != nil {
return xerrors.Errorf("Failed to detectCveByCpeURI. err: %w", err)
}
for _, detail := range details {
advisories := []models.DistroAdvisory{}
if !detail.HasNvd() && detail.HasJvn() {
for _, jvn := range detail.Jvns {
advisories = append(advisories, models.DistroAdvisory{
AdvisoryID: jvn.JvnID,
})
}
}
maxConfidence := getMaxConfidence(detail)
if val, ok := r.ScannedCves[detail.CveID]; ok {
val.CpeURIs = util.AppendIfMissing(val.CpeURIs, cpe.CpeURI)
val.Confidences.AppendIfMissing(maxConfidence)
val.DistroAdvisories = advisories
r.ScannedCves[detail.CveID] = val
} else {
v := models.VulnInfo{
CveID: detail.CveID,
CpeURIs: []string{cpe.CpeURI},
Confidences: models.Confidences{maxConfidence},
DistroAdvisories: advisories,
}
r.ScannedCves[detail.CveID] = v
nCVEs++
}
}
}
logging.Log.Infof("%s: %d CVEs are detected with CPE", r.FormatServerName(), nCVEs)
return nil
}
func getMaxConfidence(detail cvemodels.CveDetail) (max models.Confidence) {
if !detail.HasNvd() && detail.HasJvn() {
return models.JvnVendorProductMatch
} else if detail.HasNvd() {
for _, nvd := range detail.Nvds {
confidence := models.Confidence{}
switch nvd.DetectionMethod {
case cvemodels.NvdExactVersionMatch:
confidence = models.NvdExactVersionMatch
case cvemodels.NvdRoughVersionMatch:
confidence = models.NvdRoughVersionMatch
case cvemodels.NvdVendorProductMatch:
confidence = models.NvdVendorProductMatch
}
if max.Score < confidence.Score {
max = confidence
}
}
}
return max
}
// FillCweDict fills CWE
func FillCweDict(r *models.ScanResult) {
uniqCweIDMap := map[string]bool{}
for _, vinfo := range r.ScannedCves {
for _, conts := range vinfo.CveContents {
for _, cont := range conts {
for _, id := range cont.CweIDs {
if strings.HasPrefix(id, "CWE-") {
id = strings.TrimPrefix(id, "CWE-")
uniqCweIDMap[id] = true
}
}
}
}
}
dict := map[string]models.CweDictEntry{}
for id := range uniqCweIDMap {
entry := models.CweDictEntry{
OwaspTopTens: map[string]string{},
CweTopTwentyfives: map[string]string{},
SansTopTwentyfives: map[string]string{},
}
if e, ok := cwe.CweDictEn[id]; ok {
fillCweRank(&entry, id)
entry.En = &e
} else {
logging.Log.Debugf("CWE-ID %s is not found in English CWE Dict", id)
entry.En = &cwe.Cwe{CweID: id}
}
if r.Lang == "ja" {
if e, ok := cwe.CweDictJa[id]; ok {
fillCweRank(&entry, id)
entry.Ja = &e
} else {
logging.Log.Debugf("CWE-ID %s is not found in Japanese CWE Dict", id)
entry.Ja = &cwe.Cwe{CweID: id}
}
}
dict[id] = entry
}
r.CweDict = dict
return
}
func fillCweRank(entry *models.CweDictEntry, id string) {
for year, ranks := range cwe.OwaspTopTens {
if rank, ok := ranks[id]; ok {
entry.OwaspTopTens[year] = rank
}
}
for year, ranks := range cwe.CweTopTwentyfives {
if rank, ok := ranks[id]; ok {
entry.CweTopTwentyfives[year] = rank
}
}
for year, ranks := range cwe.SansTopTwentyfives {
if rank, ok := ranks[id]; ok {
entry.SansTopTwentyfives[year] = rank
}
}
}
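
FillCweDict collects every CWE-ID referenced from the scanned CVEs and, through fillCweRank, stamps each dictionary entry with its rank in the OWASP Top 10, CWE Top 25, and CWE/SANS Top 25 tables from the cwe package. A small worked sketch follows, as if written next to the code above; the CVE-ID and content are hypothetical, and models.Nvd is assumed to be an existing CveContentType constant.

r := models.ScanResult{
	Lang: "en",
	ScannedCves: models.VulnInfos{
		"CVE-0000-00000": { // hypothetical CVE-ID, for illustration only
			CveID: "CVE-0000-00000",
			CveContents: models.NewCveContents(models.CveContent{
				Type:   models.Nvd, // assumed constant; any CveContentType carrying CweIDs works
				CveID:  "CVE-0000-00000",
				CweIDs: []string{"CWE-79"},
			}),
		},
	},
}
FillCweDict(&r)
// With the maps added earlier in this change, r.CweDict["79"].SansTopTwentyfives
// includes "2010": "1" and "latest": "2"; OWASP/CWE Top 25 ranks are filled the same way.
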

90
detector/detector_test.go Normal file

@@ -0,0 +1,90 @@
//go:build !scanner
// +build !scanner
package detector
import (
"reflect"
"testing"
"github.com/future-architect/vuls/models"
cvemodels "github.com/vulsio/go-cve-dictionary/models"
)
func Test_getMaxConfidence(t *testing.T) {
type args struct {
detail cvemodels.CveDetail
}
tests := []struct {
name string
args args
wantMax models.Confidence
}{
{
name: "JvnVendorProductMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{},
Jvns: []cvemodels.Jvn{{}},
},
},
wantMax: models.JvnVendorProductMatch,
},
{
name: "NvdExactVersionMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
{DetectionMethod: cvemodels.NvdVendorProductMatch},
{DetectionMethod: cvemodels.NvdExactVersionMatch},
},
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
},
},
wantMax: models.NvdExactVersionMatch,
},
{
name: "NvdRoughVersionMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdRoughVersionMatch},
{DetectionMethod: cvemodels.NvdVendorProductMatch},
},
Jvns: []cvemodels.Jvn{},
},
},
wantMax: models.NvdRoughVersionMatch,
},
{
name: "NvdVendorProductMatch",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{
{DetectionMethod: cvemodels.NvdVendorProductMatch},
},
Jvns: []cvemodels.Jvn{{DetectionMethod: cvemodels.JvnVendorProductMatch}},
},
},
wantMax: models.NvdVendorProductMatch,
},
{
name: "empty",
args: args{
detail: cvemodels.CveDetail{
Nvds: []cvemodels.Nvd{},
Jvns: []cvemodels.Jvn{},
},
},
wantMax: models.Confidence{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if gotMax := getMaxConfidence(tt.args.detail); !reflect.DeepEqual(gotMax, tt.wantMax) {
t.Errorf("getMaxConfidence() = %v, want %v", gotMax, tt.wantMax)
}
})
}
}

258
detector/exploitdb.go Normal file

@@ -0,0 +1,258 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
exploitdb "github.com/vulsio/go-exploitdb/db"
exploitmodels "github.com/vulsio/go-exploitdb/models"
exploitlog "github.com/vulsio/go-exploitdb/util"
)
// goExploitDBClient is a DB Driver
type goExploitDBClient struct {
driver exploitdb.DB
baseURL string
}
// closeDB closes a DB connection
func (client goExploitDBClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}
func newGoExploitDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goExploitDBClient, error) {
if err := exploitlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-exploitdb logger. err: %w", err)
}
db, err := newExploitDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newExploitDB. err: %w", err)
}
return &goExploitDBClient{driver: db, baseURL: cnf.GetURL()}, nil
}
// FillWithExploit fills exploit information stored in go-exploitdb
func FillWithExploit(r *models.ScanResult, cnf config.ExploitConf, logOpts logging.LogOpts) (nExploitCve int, err error) {
client, err := newGoExploitDBClient(&cnf, logOpts)
if err != nil {
return 0, xerrors.Errorf("Failed to newGoExploitDBClient. err: %w", err)
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
if client.driver == nil {
var cveIDs []string
for cveID := range r.ScannedCves {
cveIDs = append(cveIDs, cveID)
}
prefix, err := util.URLPathJoin(client.baseURL, "cves")
if err != nil {
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
responses, err := getExploitsViaHTTP(cveIDs, prefix)
if err != nil {
return 0, xerrors.Errorf("Failed to get Exploits via HTTP. err: %w", err)
}
for _, res := range responses {
exps := []exploitmodels.Exploit{}
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
}
exploits := ConvertToModelsExploit(exps)
v, ok := r.ScannedCves[res.request.cveID]
if ok {
v.Exploits = exploits
}
r.ScannedCves[res.request.cveID] = v
nExploitCve++
}
} else {
for cveID, vuln := range r.ScannedCves {
if cveID == "" {
continue
}
es, err := client.driver.GetExploitByCveID(cveID)
if err != nil {
return 0, xerrors.Errorf("Failed to get Exploits by CVE-ID. err: %w", err)
}
if len(es) == 0 {
continue
}
exploits := ConvertToModelsExploit(es)
vuln.Exploits = exploits
r.ScannedCves[cveID] = vuln
nExploitCve++
}
}
return nExploitCve, nil
}
// ConvertToModelsExploit converts exploit model to vuls model
func ConvertToModelsExploit(es []exploitmodels.Exploit) (exploits []models.Exploit) {
for _, e := range es {
var documentURL, shellURL, paperURL, ghdbURL *string
if e.OffensiveSecurity != nil {
os := e.OffensiveSecurity
if os.Document != nil {
documentURL = &os.Document.FileURL
}
if os.ShellCode != nil {
shellURL = &os.ShellCode.FileURL
}
if os.Paper != nil {
paperURL = &os.Paper.FileURL
}
if os.GHDB != nil {
ghdbURL = &os.GHDB.Link
}
}
exploit := models.Exploit{
ExploitType: e.ExploitType,
ID: e.ExploitUniqueID,
URL: e.URL,
Description: e.Description,
DocumentURL: documentURL,
ShellCodeURL: shellURL,
PaperURL: paperURL,
GHDBURL: ghdbURL,
}
exploits = append(exploits, exploit)
}
return exploits
}
type exploitResponse struct {
request exploitRequest
json string
}
func getExploitsViaHTTP(cveIDs []string, urlPrefix string) (
responses []exploitResponse, err error) {
nReq := len(cveIDs)
reqChan := make(chan exploitRequest, nReq)
resChan := make(chan exploitResponse, nReq)
errChan := make(chan error, nReq)
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- exploitRequest{
cveID: cveID,
}
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for i := 0; i < nReq; i++ {
tasks <- func() {
req := <-reqChan
url, err := util.URLPathJoin(
urlPrefix,
req.cveID,
)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGetExploit(url, req, resChan, errChan)
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for i := 0; i < nReq; i++ {
select {
case res := <-resChan:
responses = append(responses, res)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching Exploit")
}
}
if len(errs) != 0 {
return nil, xerrors.Errorf("Failed to fetch Exploit. err: %w", errs)
}
return
}
type exploitRequest struct {
cveID string
}
func httpGetExploit(url string, req exploitRequest, resChan chan<- exploitResponse, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
count, retryMax := 0, 3
f := func() (err error) {
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
count++
if count == retryMax {
return nil
}
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error %w", err)
return
}
if count == retryMax {
errChan <- xerrors.New("Retry count exceeded")
return
}
resChan <- exploitResponse{
request: req,
json: body,
}
}
func newExploitDB(cnf config.VulnDictInterface) (exploitdb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, err := exploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), exploitdb.Option{})
if err != nil {
if xerrors.Is(err, exploitdb.ErrDBLocked) {
return nil, xerrors.Errorf("Failed to init exploit DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init exploit DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
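
httpGetExploit, like httpGetCTI and httpGetKEVuln, caps retries by counting failures inside the backoff operation, returning nil once retryMax is hit and re-checking the counter afterwards. github.go below reaches the same result with backoff.Permanent, which stops RetryNotify and surfaces the error in one step. The sketch below shows that variant under the same imports as this file; fetch is a hypothetical stand-in, not a drop-in replacement for the code above.

func getWithRetry(url string, fetch func(string) error) error {
	count, retryMax := 0, 3
	op := func() error {
		count++
		if err := fetch(url); err != nil {
			if count >= retryMax {
				// Permanent stops backoff.RetryNotify and returns this error as-is.
				return backoff.Permanent(xerrors.Errorf("retry count exceeded. err: %w", err))
			}
			return err // retried with exponential backoff
		}
		return nil
	}
	notify := func(err error, t time.Duration) {
		logging.Log.Warnf("Failed to HTTP GET. retrying in %s. err: %+v", t, err)
	}
	return backoff.RetryNotify(op, backoff.NewExponentialBackOff(), notify)
}
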

395
detector/github.go Normal file

@@ -0,0 +1,395 @@
//go:build !scanner
// +build !scanner
package detector
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strconv"
"time"
"github.com/cenkalti/backoff"
"github.com/future-architect/vuls/errof"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"golang.org/x/oauth2"
)
// DetectGitHubSecurityAlerts accesses owner/repo on GitHub, fetches security alerts of the repository via the GitHub API v4 GraphQL, and then sets them on the given ScanResult.
// https://help.github.com/articles/about-security-alerts-for-vulnerable-dependencies/
func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string, ignoreDismissed bool) (nCVEs int, err error) {
src := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},
)
//TODO Proxy
httpClient := oauth2.NewClient(context.Background(), src)
// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
// Memo : https://developer.github.com/v4/explorer/
const jsonfmt = `{"query":
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, states:[OPEN], %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } vulnerableManifestFilename vulnerableManifestPath vulnerableRequirements securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
after := ""
for {
jsonStr := fmt.Sprintf(jsonfmt, owner, repo, 100, after)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
"https://api.github.com/graphql",
bytes.NewBuffer([]byte(jsonStr)),
)
defer cancel()
if err != nil {
return 0, err
}
// https://developer.github.com/v4/previews/#repository-vulnerability-alerts
// To toggle this preview and access the data, you need to provide a custom media type in the Accept header.
// MEMO: I tried to get the affected version via the GitHub API, but it seems difficult to determine the affected version if there are multiple dependency files such as package.json.
// TODO remove this header once the API is no longer in preview status.
req.Header.Set("Accept", "application/vnd.github.package-deletes-preview+json")
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
return 0, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return 0, err
}
alerts := SecurityAlerts{}
if err := json.Unmarshal(body, &alerts); err != nil {
return 0, err
}
// util.Log.Debugf("%s", pp.Sprint(alerts))
// util.Log.Debugf("%s", string(body))
if alerts.Data.Repository.URL == "" {
return 0, errof.New(errof.ErrFailedToAccessGithubAPI,
fmt.Sprintf("Failed to access to GitHub API. Response: %s", string(body)))
}
for _, v := range alerts.Data.Repository.VulnerabilityAlerts.Edges {
if ignoreDismissed && v.Node.DismissReason != "" {
continue
}
m := models.GitHubSecurityAlert{
Repository: alerts.Data.Repository.URL,
Package: models.GSAVulnerablePackage{
Name: v.Node.SecurityVulnerability.Package.Name,
Ecosystem: v.Node.SecurityVulnerability.Package.Ecosystem,
ManifestFilename: v.Node.VulnerableManifestFilename,
ManifestPath: v.Node.VulnerableManifestPath,
Requirements: v.Node.VulnerableRequirements,
},
FixedIn: v.Node.SecurityVulnerability.FirstPatchedVersion.Identifier,
AffectedRange: v.Node.SecurityVulnerability.VulnerableVersionRange,
Dismissed: len(v.Node.DismissReason) != 0,
DismissedAt: v.Node.DismissedAt,
DismissReason: v.Node.DismissReason,
}
cveIDs, other := []string{}, []string{}
for _, identifier := range v.Node.SecurityAdvisory.Identifiers {
if identifier.Type == "CVE" {
cveIDs = append(cveIDs, identifier.Value)
} else {
other = append(other, identifier.Value)
}
}
// If a CVE-ID has not been assigned, use the GHSA ID etc. as the ID.
if len(cveIDs) == 0 {
cveIDs = other
}
refs := []models.Reference{}
for _, r := range v.Node.SecurityAdvisory.References {
refs = append(refs, models.Reference{Link: r.URL})
}
for _, cveID := range cveIDs {
cveContent := models.CveContent{
Type: models.GitHub,
CveID: cveID,
Title: v.Node.SecurityAdvisory.Summary,
Summary: v.Node.SecurityAdvisory.Description,
Cvss2Severity: v.Node.SecurityVulnerability.Severity,
Cvss3Severity: v.Node.SecurityVulnerability.Severity,
SourceLink: v.Node.SecurityAdvisory.Permalink,
References: refs,
Published: v.Node.SecurityAdvisory.PublishedAt,
LastModified: v.Node.SecurityAdvisory.UpdatedAt,
}
if val, ok := r.ScannedCves[cveID]; ok {
val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
val.CveContents[models.GitHub] = []models.CveContent{cveContent}
r.ScannedCves[cveID] = val
} else {
v := models.VulnInfo{
CveID: cveID,
Confidences: models.Confidences{models.GitHubMatch},
GitHubSecurityAlerts: models.GitHubSecurityAlerts{m},
CveContents: models.NewCveContents(cveContent),
}
r.ScannedCves[cveID] = v
}
nCVEs++
}
}
if !alerts.Data.Repository.VulnerabilityAlerts.PageInfo.HasNextPage {
break
}
after = fmt.Sprintf(`after: \"%s\"`, alerts.Data.Repository.VulnerabilityAlerts.PageInfo.EndCursor)
}
return nCVEs, err
}
// SecurityAlerts has detected CVE-IDs, PackageNames, Refs
type SecurityAlerts struct {
Data struct {
Repository struct {
URL string `json:"url"`
VulnerabilityAlerts struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
StartCursor string `json:"startCursor"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
ID string `json:"id"`
DismissReason string `json:"dismissReason"`
DismissedAt time.Time `json:"dismissedAt"`
SecurityVulnerability struct {
Package struct {
Name string `json:"name"`
Ecosystem string `json:"ecosystem"`
} `json:"package"`
Severity string `json:"severity"`
VulnerableVersionRange string `json:"vulnerableVersionRange"`
FirstPatchedVersion struct {
Identifier string `json:"identifier"`
} `json:"firstPatchedVersion"`
} `json:"securityVulnerability"`
VulnerableManifestFilename string `json:"vulnerableManifestFilename"`
VulnerableManifestPath string `json:"vulnerableManifestPath"`
VulnerableRequirements string `json:"vulnerableRequirements"`
SecurityAdvisory struct {
Description string `json:"description"`
GhsaID string `json:"ghsaId"`
Permalink string `json:"permalink"`
PublishedAt time.Time `json:"publishedAt"`
Summary string `json:"summary"`
UpdatedAt time.Time `json:"updatedAt"`
WithdrawnAt time.Time `json:"withdrawnAt"`
Origin string `json:"origin"`
Severity string `json:"severity"`
References []struct {
URL string `json:"url"`
} `json:"references"`
Identifiers []struct {
Type string `json:"type"`
Value string `json:"value"`
} `json:"identifiers"`
} `json:"securityAdvisory"`
} `json:"node"`
} `json:"edges"`
} `json:"vulnerabilityAlerts"`
} `json:"repository"`
} `json:"data"`
}
// DetectGitHubDependencyGraph accesses owner/repo on GitHub, fetches the dependency graph of the repository via the GitHub API v4 GraphQL, and then sets it on the given ScanResult.
// https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-the-dependency-graph
func DetectGitHubDependencyGraph(r *models.ScanResult, owner, repo, token string) (err error) {
src := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},
)
//TODO Proxy
httpClient := oauth2.NewClient(context.Background(), src)
return fetchDependencyGraph(r, httpClient, owner, repo, "", "", 10, 100)
}
// recursive function
func fetchDependencyGraph(r *models.ScanResult, httpClient *http.Client, owner, repo, after, dependenciesAfter string, first, dependenciesFirst int) (err error) {
const queryFmt = `{"query":
"query { repository(owner:\"%s\", name:\"%s\") { url dependencyGraphManifests(first: %d, withDependencies: true%s) { pageInfo { endCursor hasNextPage } edges { node { blobPath filename repository { url } parseable exceedsMaxSize dependenciesCount dependencies(first: %d%s) { pageInfo { endCursor hasNextPage } edges { node { packageName packageManager repository { url } requirements hasDependencies } } } } } } } }"}`
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
defer cancel()
var graph DependencyGraph
rateLimitRemaining := 5000
count, retryMax := 0, 10
retryCheck := func(err error) error {
if count == retryMax {
return backoff.Permanent(err)
}
if rateLimitRemaining == 0 {
// The GraphQL API rate limit is 5,000 points per hour.
// Terminate with an error when the rate limit is reached.
return backoff.Permanent(errof.New(errof.ErrFailedToAccessGithubAPI,
fmt.Sprintf("rate limit exceeded. error: %s", err.Error())))
}
return err
}
operation := func() error {
count++
queryStr := fmt.Sprintf(queryFmt, owner, repo, first, after, dependenciesFirst, dependenciesAfter)
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
"https://api.github.com/graphql",
bytes.NewBuffer([]byte(queryStr)),
)
if err != nil {
return retryCheck(err)
}
// https://docs.github.com/en/graphql/overview/schema-previews#access-to-a-repository-s-dependency-graph-preview
// TODO remove this header if it is no longer preview status in the future.
req.Header.Set("Accept", "application/vnd.github.hawkgirl-preview+json")
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
return retryCheck(err)
}
defer resp.Body.Close()
// https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit
if rateLimitRemaining, err = strconv.Atoi(resp.Header.Get("X-RateLimit-Remaining")); err != nil {
// If the header retrieval fails, rateLimitRemaining will be set to 0,
// preventing further retries. To enable retry, we reset it to 5000.
rateLimitRemaining = 5000
return retryCheck(errof.New(errof.ErrFailedToAccessGithubAPI, "Failed to get X-RateLimit-Remaining header"))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return retryCheck(err)
}
graph = DependencyGraph{}
if err := json.Unmarshal(body, &graph); err != nil {
return retryCheck(err)
}
if len(graph.Errors) > 0 || graph.Data.Repository.URL == "" {
// this mainly occurs on timeout
// reduce the number of dependencies to be fetched for the next retry
if dependenciesFirst > 50 {
dependenciesFirst -= 5
}
return retryCheck(errof.New(errof.ErrFailedToAccessGithubAPI,
fmt.Sprintf("Failed to access to GitHub API. Repository: %s/%s; Response: %s", owner, repo, string(body))))
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed attempts (count: %d). retrying in %s. err: %+v", count, t, err)
}
if err = backoff.RetryNotify(operation, backoff.NewExponentialBackOff(), notify); err != nil {
return err
}
dependenciesAfter = ""
for _, m := range graph.Data.Repository.DependencyGraphManifests.Edges {
manifest, ok := r.GitHubManifests[m.Node.BlobPath]
if !ok {
manifest = models.DependencyGraphManifest{
BlobPath: m.Node.BlobPath,
Filename: m.Node.Filename,
Repository: m.Node.Repository.URL,
Dependencies: []models.Dependency{},
}
}
for _, d := range m.Node.Dependencies.Edges {
manifest.Dependencies = append(manifest.Dependencies, models.Dependency{
PackageName: d.Node.PackageName,
PackageManager: d.Node.PackageManager,
Repository: d.Node.Repository.URL,
Requirements: d.Node.Requirements,
})
}
r.GitHubManifests[m.Node.BlobPath] = manifest
if m.Node.Dependencies.PageInfo.HasNextPage {
dependenciesAfter = fmt.Sprintf(`, after: \"%s\"`, m.Node.Dependencies.PageInfo.EndCursor)
}
}
if dependenciesAfter != "" {
return fetchDependencyGraph(r, httpClient, owner, repo, after, dependenciesAfter, first, dependenciesFirst)
}
if graph.Data.Repository.DependencyGraphManifests.PageInfo.HasNextPage {
after = fmt.Sprintf(`, after: \"%s\"`, graph.Data.Repository.DependencyGraphManifests.PageInfo.EndCursor)
return fetchDependencyGraph(r, httpClient, owner, repo, after, dependenciesAfter, first, dependenciesFirst)
}
return nil
}
// DependencyGraph is a GitHub API response
type DependencyGraph struct {
Data struct {
Repository struct {
URL string `json:"url"`
DependencyGraphManifests struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
BlobPath string `json:"blobPath"`
Filename string `json:"filename"`
Repository struct {
URL string `json:"url"`
}
Parseable bool `json:"parseable"`
ExceedsMaxSize bool `json:"exceedsMaxSize"`
DependenciesCount int `json:"dependenciesCount"`
Dependencies struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
PackageName string `json:"packageName"`
PackageManager string `json:"packageManager"`
Repository struct {
URL string `json:"url"`
}
Requirements string `json:"requirements"`
HasDependencies bool `json:"hasDependencies"`
} `json:"node"`
} `json:"edges"`
} `json:"dependencies"`
} `json:"node"`
} `json:"edges"`
} `json:"dependencyGraphManifests"`
} `json:"repository"`
} `json:"data"`
Errors []struct {
Type string `json:"type,omitempty"`
Path []interface{} `json:"path,omitempty"`
Locations []struct {
Line int `json:"line"`
Column int `json:"column"`
} `json:"locations,omitempty"`
Message string `json:"message"`
} `json:"errors,omitempty"`
}

245
detector/kevuln.go Normal file

@@ -0,0 +1,245 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
kevulndb "github.com/vulsio/go-kev/db"
kevulnmodels "github.com/vulsio/go-kev/models"
kevulnlog "github.com/vulsio/go-kev/utils"
)
// goKEVulnDBClient is a DB Driver
type goKEVulnDBClient struct {
driver kevulndb.DB
baseURL string
}
// closeDB closes a DB connection
func (client goKEVulnDBClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}
func newGoKEVulnDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goKEVulnDBClient, error) {
if err := kevulnlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-kev logger. err: %w", err)
}
db, err := newKEVulnDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newKEVulnDB. err: %w", err)
}
return &goKEVulnDBClient{driver: db, baseURL: cnf.GetURL()}, nil
}
// FillWithKEVuln :
func FillWithKEVuln(r *models.ScanResult, cnf config.KEVulnConf, logOpts logging.LogOpts) error {
client, err := newGoKEVulnDBClient(&cnf, logOpts)
if err != nil {
return err
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
nKEV := 0
if client.driver == nil {
var cveIDs []string
for cveID := range r.ScannedCves {
cveIDs = append(cveIDs, cveID)
}
prefix, err := util.URLPathJoin(client.baseURL, "cves")
if err != nil {
return err
}
responses, err := getKEVulnsViaHTTP(cveIDs, prefix)
if err != nil {
return err
}
for _, res := range responses {
kevulns := []kevulnmodels.KEVuln{}
if err := json.Unmarshal([]byte(res.json), &kevulns); err != nil {
return err
}
alerts := []models.Alert{}
if len(kevulns) > 0 {
alerts = append(alerts, models.Alert{
Title: "Known Exploited Vulnerabilities Catalog",
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
Team: "cisa",
})
}
v, ok := r.ScannedCves[res.request.cveID]
if ok {
v.AlertDict.CISA = alerts
nKEV++
}
r.ScannedCves[res.request.cveID] = v
}
} else {
for cveID, vuln := range r.ScannedCves {
if cveID == "" {
continue
}
kevulns, err := client.driver.GetKEVulnByCveID(cveID)
if err != nil {
return err
}
if len(kevulns) == 0 {
continue
}
alerts := []models.Alert{}
if len(kevulns) > 0 {
alerts = append(alerts, models.Alert{
Title: "Known Exploited Vulnerabilities Catalog",
URL: "https://www.cisa.gov/known-exploited-vulnerabilities-catalog",
Team: "cisa",
})
}
vuln.AlertDict.CISA = alerts
nKEV++
r.ScannedCves[cveID] = vuln
}
}
logging.Log.Infof("%s: Known Exploited Vulnerabilities are detected for %d CVEs", r.FormatServerName(), nKEV)
return nil
}
type kevulnResponse struct {
request kevulnRequest
json string
}
func getKEVulnsViaHTTP(cveIDs []string, urlPrefix string) (
responses []kevulnResponse, err error) {
nReq := len(cveIDs)
reqChan := make(chan kevulnRequest, nReq)
resChan := make(chan kevulnResponse, nReq)
errChan := make(chan error, nReq)
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- kevulnRequest{
cveID: cveID,
}
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for i := 0; i < nReq; i++ {
tasks <- func() {
req := <-reqChan
url, err := util.URLPathJoin(
urlPrefix,
req.cveID,
)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGetKEVuln(url, req, resChan, errChan)
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for i := 0; i < nReq; i++ {
select {
case res := <-resChan:
responses = append(responses, res)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching KEVuln")
}
}
if len(errs) != 0 {
return nil, xerrors.Errorf("Failed to fetch KEVuln. err: %w", errs)
}
return
}
type kevulnRequest struct {
cveID string
}
func httpGetKEVuln(url string, req kevulnRequest, resChan chan<- kevulnResponse, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
count, retryMax := 0, 3
f := func() (err error) {
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
count++
if count == retryMax {
return nil
}
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error %w", err)
return
}
if count == retryMax {
errChan <- xerrors.New("Retry count exceeded")
return
}
resChan <- kevulnResponse{
request: req,
json: body,
}
}
func newKEVulnDB(cnf config.VulnDictInterface) (kevulndb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, err := kevulndb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), kevulndb.Option{})
if err != nil {
if xerrors.Is(err, kevulndb.ErrDBLocked) {
return nil, xerrors.Errorf("Failed to init kevuln DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init kevuln DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
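A minimal usage sketch (not part of the diff): calling FillWithKEVuln from outside the package. The zero-value KEVulnConf and LogOpts are placeholders; a real run would point the config at a go-kev DB or HTTP endpoint.

package main

import (
    "fmt"

    "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/detector"
    "github.com/future-architect/vuls/logging"
    "github.com/future-architect/vuls/models"
)

func main() {
    r := models.ScanResult{ScannedCves: models.VulnInfos{}}
    // Placeholder config; with an HTTP URL configured the function queries go-kev per CVE-ID,
    // otherwise it looks up the local go-kev database.
    if err := detector.FillWithKEVuln(&r, config.KEVulnConf{}, logging.LogOpts{}); err != nil {
        fmt.Println(err)
    }
}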

97
detector/library.go Normal file

@@ -0,0 +1,97 @@
//go:build !scanner
// +build !scanner
package detector
import (
"context"
trivydb "github.com/aquasecurity/trivy-db/pkg/db"
"github.com/aquasecurity/trivy-db/pkg/metadata"
"github.com/aquasecurity/trivy/pkg/db"
"github.com/aquasecurity/trivy/pkg/log"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
)
// DetectLibsCves fills LibraryScanner information
func DetectLibsCves(r *models.ScanResult, cacheDir string, noProgress bool) (err error) {
totalCnt := 0
if len(r.LibraryScanners) == 0 {
return
}
// initialize trivy's logger and db
err = log.InitLogger(false, false)
if err != nil {
return err
}
logging.Log.Info("Updating library db...")
if err := downloadDB("", cacheDir, noProgress, false); err != nil {
return err
}
if err := trivydb.Init(cacheDir); err != nil {
return err
}
defer trivydb.Close()
for _, lib := range r.LibraryScanners {
vinfos, err := lib.Scan()
if err != nil {
return err
}
for _, vinfo := range vinfos {
vinfo.Confidences.AppendIfMissing(models.TrivyMatch)
if v, ok := r.ScannedCves[vinfo.CveID]; !ok {
r.ScannedCves[vinfo.CveID] = vinfo
} else {
v.LibraryFixedIns = append(v.LibraryFixedIns, vinfo.LibraryFixedIns...)
r.ScannedCves[vinfo.CveID] = v
}
}
totalCnt += len(vinfos)
}
logging.Log.Infof("%s: %d CVEs are detected with Library",
r.FormatServerName(), totalCnt)
return nil
}
func downloadDB(appVersion, cacheDir string, quiet, skipUpdate bool) error {
client := db.NewClient(cacheDir, quiet, false)
ctx := context.Background()
needsUpdate, err := client.NeedsUpdate(appVersion, skipUpdate)
if err != nil {
return xerrors.Errorf("database error: %w", err)
}
if needsUpdate {
logging.Log.Info("Need to update DB")
logging.Log.Info("Downloading DB...")
if err := client.Download(ctx, cacheDir); err != nil {
return xerrors.Errorf("failed to download vulnerability DB: %w", err)
}
}
// for debug
if err := showDBInfo(cacheDir); err != nil {
return xerrors.Errorf("failed to show database info: %w", err)
}
return nil
}
func showDBInfo(cacheDir string) error {
m := metadata.NewClient(cacheDir)
meta, err := m.Get()
if err != nil {
return xerrors.Errorf("something wrong with DB: %w", err)
}
log.Logger.Debugf("DB Schema: %d, UpdatedAt: %s, NextUpdate: %s, DownloadedAt: %s",
meta.Version, meta.UpdatedAt, meta.NextUpdate, meta.DownloadedAt)
return nil
}
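A hypothetical sketch of invoking DetectLibsCves; the cache directory is a placeholder, and the ScanResult would normally already carry LibraryScanners produced by a scan.

package main

import (
    "log"

    "github.com/future-architect/vuls/detector"
    "github.com/future-architect/vuls/models"
)

func main() {
    r := models.ScanResult{ScannedCves: models.VulnInfos{}}
    // With no LibraryScanners the call returns immediately; otherwise it refreshes
    // trivy-db under the cache dir and matches each library against it.
    if err := detector.DetectLibsCves(&r, "/tmp/vuls-trivy-cache", true); err != nil {
        log.Fatal(err)
    }
}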

244
detector/msf.go Normal file

@@ -0,0 +1,244 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
metasploitdb "github.com/vulsio/go-msfdb/db"
metasploitmodels "github.com/vulsio/go-msfdb/models"
metasploitlog "github.com/vulsio/go-msfdb/utils"
)
// goMetasploitDBClient is a DB Driver
type goMetasploitDBClient struct {
driver metasploitdb.DB
baseURL string
}
// closeDB closes a DB connection
func (client goMetasploitDBClient) closeDB() error {
if client.driver == nil {
return nil
}
return client.driver.CloseDB()
}
func newGoMetasploitDBClient(cnf config.VulnDictInterface, o logging.LogOpts) (*goMetasploitDBClient, error) {
if err := metasploitlog.SetLogger(o.LogToFile, o.LogDir, o.Debug, o.LogJSON); err != nil {
return nil, xerrors.Errorf("Failed to set go-msfdb logger. err: %w", err)
}
db, err := newMetasploitDB(cnf)
if err != nil {
return nil, xerrors.Errorf("Failed to newMetasploitDB. err: %w", err)
}
return &goMetasploitDBClient{driver: db, baseURL: cnf.GetURL()}, nil
}
// FillWithMetasploit fills Metasploit module information for the detected CVEs
func FillWithMetasploit(r *models.ScanResult, cnf config.MetasploitConf, logOpts logging.LogOpts) (nMetasploitCve int, err error) {
client, err := newGoMetasploitDBClient(&cnf, logOpts)
if err != nil {
return 0, xerrors.Errorf("Failed to newGoMetasploitDBClient. err: %w", err)
}
defer func() {
if err := client.closeDB(); err != nil {
logging.Log.Errorf("Failed to close DB. err: %+v", err)
}
}()
if client.driver == nil {
var cveIDs []string
for cveID := range r.ScannedCves {
cveIDs = append(cveIDs, cveID)
}
prefix, err := util.URLPathJoin(client.baseURL, "cves")
if err != nil {
return 0, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
responses, err := getMetasploitsViaHTTP(cveIDs, prefix)
if err != nil {
return 0, xerrors.Errorf("Failed to get Metasploits via HTTP. err: %w", err)
}
for _, res := range responses {
msfs := []metasploitmodels.Metasploit{}
if err := json.Unmarshal([]byte(res.json), &msfs); err != nil {
return 0, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
}
metasploits := ConvertToModelsMsf(msfs)
v, ok := r.ScannedCves[res.request.cveID]
if ok {
v.Metasploits = metasploits
}
r.ScannedCves[res.request.cveID] = v
nMetasploitCve++
}
} else {
for cveID, vuln := range r.ScannedCves {
if cveID == "" {
continue
}
ms, err := client.driver.GetModuleByCveID(cveID)
if err != nil {
return 0, xerrors.Errorf("Failed to get Metasploits by CVE-ID. err: %w", err)
}
if len(ms) == 0 {
continue
}
modules := ConvertToModelsMsf(ms)
vuln.Metasploits = modules
r.ScannedCves[cveID] = vuln
nMetasploitCve++
}
}
return nMetasploitCve, nil
}
type metasploitResponse struct {
request metasploitRequest
json string
}
func getMetasploitsViaHTTP(cveIDs []string, urlPrefix string) (
responses []metasploitResponse, err error) {
nReq := len(cveIDs)
reqChan := make(chan metasploitRequest, nReq)
resChan := make(chan metasploitResponse, nReq)
errChan := make(chan error, nReq)
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- metasploitRequest{
cveID: cveID,
}
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for i := 0; i < nReq; i++ {
tasks <- func() {
req := <-reqChan
url, err := util.URLPathJoin(
urlPrefix,
req.cveID,
)
if err != nil {
errChan <- err
} else {
logging.Log.Debugf("HTTP Request to %s", url)
httpGetMetasploit(url, req, resChan, errChan)
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for i := 0; i < nReq; i++ {
select {
case res := <-resChan:
responses = append(responses, res)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching Metasploit")
}
}
if len(errs) != 0 {
return nil, xerrors.Errorf("Failed to fetch Metasploit. err: %w", errs)
}
return
}
type metasploitRequest struct {
cveID string
}
func httpGetMetasploit(url string, req metasploitRequest, resChan chan<- metasploitResponse, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
count, retryMax := 0, 3
f := func() (err error) {
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
resp, body, errs = gorequest.New().Timeout(10 * time.Second).Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
count++
if count == retryMax {
return nil
}
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %+v", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
logging.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %+v", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error %w", err)
return
}
if count == retryMax {
errChan <- xerrors.New("Retry count exceeded")
return
}
resChan <- metasploitResponse{
request: req,
json: body,
}
}
// ConvertToModelsMsf converts metasploit model to vuls model
func ConvertToModelsMsf(ms []metasploitmodels.Metasploit) (modules []models.Metasploit) {
for _, m := range ms {
var links []string
if 0 < len(m.References) {
for _, u := range m.References {
links = append(links, u.Link)
}
}
module := models.Metasploit{
Name: m.Name,
Title: m.Title,
Description: m.Description,
URLs: links,
}
modules = append(modules, module)
}
return modules
}
func newMetasploitDB(cnf config.VulnDictInterface) (metasploitdb.DB, error) {
if cnf.IsFetchViaHTTP() {
return nil, nil
}
path := cnf.GetURL()
if cnf.GetType() == "sqlite3" {
path = cnf.GetSQLite3Path()
}
driver, err := metasploitdb.NewDB(cnf.GetType(), path, cnf.GetDebugSQL(), metasploitdb.Option{})
if err != nil {
if xerrors.Is(err, metasploitdb.ErrDBLocked) {
return nil, xerrors.Errorf("Failed to init metasploit DB. SQLite3: %s is locked. err: %w", cnf.GetSQLite3Path(), err)
}
return nil, xerrors.Errorf("Failed to init metasploit DB. DB Path: %s, err: %w", path, err)
}
return driver, nil
}
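A sketch (not part of the diff) of how the count returned by FillWithMetasploit could be consumed; the zero-value config and log options are placeholders.

package main

import (
    "fmt"
    "log"

    "github.com/future-architect/vuls/config"
    "github.com/future-architect/vuls/detector"
    "github.com/future-architect/vuls/logging"
    "github.com/future-architect/vuls/models"
)

func main() {
    r := models.ScanResult{ScannedCves: models.VulnInfos{}}
    // Placeholder config; a real run would point at a go-msfdb database or HTTP endpoint.
    n, err := detector.FillWithMetasploit(&r, config.MetasploitConf{}, logging.LogOpts{})
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%d CVEs have Metasploit modules\n", n)
}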

261
detector/util.go Normal file

@@ -0,0 +1,261 @@
//go:build !scanner
// +build !scanner
package detector
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"reflect"
"sort"
"time"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/constant"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"golang.org/x/xerrors"
)
func reuseScannedCves(r *models.ScanResult) bool {
switch r.Family {
case constant.FreeBSD, constant.Raspbian:
return true
}
return r.ScannedBy == "trivy"
}
func needToRefreshCve(r models.ScanResult) bool {
for _, cve := range r.ScannedCves {
if 0 < len(cve.CveContents) {
return false
}
}
return true
}
func loadPrevious(currs models.ScanResults, resultsDir string) (prevs models.ScanResults, err error) {
dirs, err := ListValidJSONDirs(resultsDir)
if err != nil {
return
}
for _, result := range currs {
filename := result.ServerName + ".json"
if result.Container.Name != "" {
filename = fmt.Sprintf("%s@%s.json", result.Container.Name, result.ServerName)
}
for _, dir := range dirs[1:] {
path := filepath.Join(dir, filename)
r, err := loadOneServerScanResult(path)
if err != nil {
logging.Log.Debugf("%+v", err)
continue
}
if r.Family == result.Family && r.Release == result.Release {
prevs = append(prevs, *r)
logging.Log.Infof("Previous json found: %s", path)
break
}
logging.Log.Infof("Previous json is different family.Release: %s, pre: %s.%s cur: %s.%s",
path, r.Family, r.Release, result.Family, result.Release)
}
}
return prevs, nil
}
func diff(curResults, preResults models.ScanResults, isPlus, isMinus bool) (diffed models.ScanResults) {
for _, current := range curResults {
found := false
var previous models.ScanResult
for _, r := range preResults {
if current.ServerName == r.ServerName && current.Container.Name == r.Container.Name {
found = true
previous = r
break
}
}
if !found {
diffed = append(diffed, current)
continue
}
cves := models.VulnInfos{}
if isPlus {
cves = getPlusDiffCves(previous, current)
}
if isMinus {
minus := getMinusDiffCves(previous, current)
if len(cves) == 0 {
cves = minus
} else {
for k, v := range minus {
cves[k] = v
}
}
}
packages := models.Packages{}
for _, s := range cves {
for _, affected := range s.AffectedPackages {
var p models.Package
if s.DiffStatus == models.DiffPlus {
p = current.Packages[affected.Name]
} else {
p = previous.Packages[affected.Name]
}
packages[affected.Name] = p
}
}
current.ScannedCves = cves
current.Packages = packages
diffed = append(diffed, current)
}
return
}
func getPlusDiffCves(previous, current models.ScanResult) models.VulnInfos {
previousCveIDsSet := map[string]bool{}
for _, previousVulnInfo := range previous.ScannedCves {
previousCveIDsSet[previousVulnInfo.CveID] = true
}
newer := models.VulnInfos{}
updated := models.VulnInfos{}
for _, v := range current.ScannedCves {
if previousCveIDsSet[v.CveID] {
if isCveInfoUpdated(v.CveID, previous, current) {
v.DiffStatus = models.DiffPlus
updated[v.CveID] = v
logging.Log.Debugf("updated: %s", v.CveID)
// TODO commented out because of a bug in the diff logic when multiple OVAL defs are found for a certain CVE-ID with the same updated_at.
// If these OVAL defs have different affected packages, this logic detects them as updated.
// This logic will be uncommented after integration with gost https://github.com/vulsio/gost
// } else if isCveFixed(v, previous) {
// updated[v.CveID] = v
// logging.Log.Debugf("fixed: %s", v.CveID)
} else {
logging.Log.Debugf("same: %s", v.CveID)
}
} else {
logging.Log.Debugf("newer: %s", v.CveID)
v.DiffStatus = models.DiffPlus
newer[v.CveID] = v
}
}
if len(updated) == 0 && len(newer) == 0 {
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
}
for cveID, vuln := range newer {
updated[cveID] = vuln
}
return updated
}
func getMinusDiffCves(previous, current models.ScanResult) models.VulnInfos {
currentCveIDsSet := map[string]bool{}
for _, currentVulnInfo := range current.ScannedCves {
currentCveIDsSet[currentVulnInfo.CveID] = true
}
clear := models.VulnInfos{}
for _, v := range previous.ScannedCves {
if !currentCveIDsSet[v.CveID] {
v.DiffStatus = models.DiffMinus
clear[v.CveID] = v
logging.Log.Debugf("clear: %s", v.CveID)
}
}
if len(clear) == 0 {
logging.Log.Infof("%s: There are %d vulnerabilities, but no difference between current result and previous one.", current.FormatServerName(), len(current.ScannedCves))
}
return clear
}
func isCveInfoUpdated(cveID string, previous, current models.ScanResult) bool {
cTypes := append([]models.CveContentType{models.Nvd, models.Jvn}, models.GetCveContentTypes(current.Family)...)
prevLastModified := map[models.CveContentType][]time.Time{}
preVinfo, ok := previous.ScannedCves[cveID]
if !ok {
return true
}
for _, cType := range cTypes {
if conts, ok := preVinfo.CveContents[cType]; ok {
for _, cont := range conts {
prevLastModified[cType] = append(prevLastModified[cType], cont.LastModified)
}
}
}
curLastModified := map[models.CveContentType][]time.Time{}
curVinfo, ok := current.ScannedCves[cveID]
if !ok {
return true
}
for _, cType := range cTypes {
if conts, ok := curVinfo.CveContents[cType]; ok {
for _, cont := range conts {
curLastModified[cType] = append(curLastModified[cType], cont.LastModified)
}
}
}
for _, t := range cTypes {
if !reflect.DeepEqual(curLastModified[t], prevLastModified[t]) {
logging.Log.Debugf("%s LastModified not equal: \n%s\n%s",
cveID, curLastModified[t], prevLastModified[t])
return true
}
}
return false
}
// ListValidJSONDirs returns valid JSON directories as an array.
// The returned array is sorted so that recent directories are at the head.
func ListValidJSONDirs(resultsDir string) (dirs []string, err error) {
dirInfo, err := os.ReadDir(resultsDir)
if err != nil {
return nil, xerrors.Errorf("Failed to read %s: %w", config.Conf.ResultsDir, err)
}
for _, d := range dirInfo {
if !d.IsDir() {
continue
}
for _, layout := range []string{"2006-01-02T15:04:05Z", "2006-01-02T15:04:05-07:00", "2006-01-02T15-04-05-0700"} {
if _, err := time.Parse(layout, d.Name()); err == nil {
dirs = append(dirs, filepath.Join(resultsDir, d.Name()))
break
}
}
}
sort.Slice(dirs, func(i, j int) bool {
return dirs[j] < dirs[i]
})
return
}
// loadOneServerScanResult reads JSON data of one server
func loadOneServerScanResult(jsonFile string) (*models.ScanResult, error) {
var (
data []byte
err error
)
if data, err = os.ReadFile(jsonFile); err != nil {
return nil, xerrors.Errorf("Failed to read %s: %w", jsonFile, err)
}
result := &models.ScanResult{}
if err := json.Unmarshal(data, result); err != nil {
return nil, xerrors.Errorf("Failed to parse %s: %w", jsonFile, err)
}
return result, nil
}
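ListValidJSONDirs is the only exported helper in this file; a small sketch (with a placeholder results path) of how the newest-first ordering is typically consumed, mirroring what loadPrevious does with dirs[1:].

package main

import (
    "fmt"
    "log"

    "github.com/future-architect/vuls/detector"
)

func main() {
    dirs, err := detector.ListValidJSONDirs("./results") // placeholder results dir
    if err != nil {
        log.Fatal(err)
    }
    // dirs[0] is the most recent scan; older result directories follow.
    if len(dirs) > 1 {
        fmt.Printf("current: %s\nprevious candidate: %s\n", dirs[0], dirs[1])
    }
}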


@@ -1,22 +1,27 @@
package wordpress
//go:build !scanner
// +build !scanner
package detector
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"strings"
"time"
c "github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/errof"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
version "github.com/hashicorp/go-version"
"golang.org/x/xerrors"
)
//WpCveInfos is for wpscan json
// WpCveInfos is for wpscan json
type WpCveInfos struct {
ReleaseDate string `json:"release_date"`
ChangelogURL string `json:"changelog_url"`
@@ -28,19 +33,18 @@ type WpCveInfos struct {
Error string `json:"error"`
}
//WpCveInfo is for wpscan json
// WpCveInfo is for wpscan json
type WpCveInfo struct {
ID string `json:"id"`
Title string `json:"title"`
CreatedAt string `json:"created_at"`
UpdatedAt string `json:"updated_at"`
// PublishedDate string `json:"published_date"`
ID string `json:"id"`
Title string `json:"title"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
VulnType string `json:"vuln_type"`
References References `json:"references"`
FixedIn string `json:"fixed_in"`
}
//References is for wpscan json
// References is for wpscan json
type References struct {
URL []string `json:"url"`
Cve []string `json:"cve"`
@@ -49,18 +53,18 @@ type References struct {
// DetectWordPressCves accesses wpscan.com, fetches security alerts, and then sets them on the given ScanResult.
// https://wpscan.com/
// TODO move to report
func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
func detectWordPressCves(r *models.ScanResult, cnf config.WpScanConf) (int, error) {
if len(r.WordPressPackages) == 0 {
return 0, nil
}
// Core
ver := strings.Replace(r.WordPressPackages.CoreVersion(), ".", "", -1)
if ver == "" {
return 0, xerrors.New("Failed to get WordPress core version")
return 0, errof.New(errof.ErrFailedToAccessWpScan,
fmt.Sprintf("Failed to get WordPress core version."))
}
url := fmt.Sprintf("https://wpscan.com/api/v3/wordpresses/%s", ver)
wpVinfos, err := wpscan(url, ver, cnf.Token)
wpVinfos, err := wpscan(url, ver, cnf.Token, true)
if err != nil {
return 0, err
}
@@ -72,7 +76,7 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
}
for _, p := range themes {
url := fmt.Sprintf("https://wpscan.com/api/v3/themes/%s", p.Name)
candidates, err := wpscan(url, p.Name, cnf.Token)
candidates, err := wpscan(url, p.Name, cnf.Token, false)
if err != nil {
return 0, err
}
@@ -87,7 +91,7 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
}
for _, p := range plugins {
url := fmt.Sprintf("https://wpscan.com/api/v3/plugins/%s", p.Name)
candidates, err := wpscan(url, p.Name, cnf.Token)
candidates, err := wpscan(url, p.Name, cnf.Token, false)
if err != nil {
return 0, err
}
@@ -109,14 +113,16 @@ func DetectWordPressCves(r *models.ScanResult, cnf *c.WpScanConf) (int, error) {
return len(wpVinfos), nil
}
func wpscan(url, name, token string) (vinfos []models.VulnInfo, err error) {
func wpscan(url, name, token string, isCore bool) (vinfos []models.VulnInfo, err error) {
body, err := httpRequest(url, token)
if err != nil {
return nil, errof.New(errof.ErrFailedToAccessWpScan,
fmt.Sprintf("Failed to access to wpscan.comm. body: %s, err: %s", string(body), err))
return nil, err
}
if body == "" {
util.Log.Debugf("wpscan.com response body is empty. URL: %s", url)
logging.Log.Debugf("wpscan.com response body is empty. URL: %s", url)
}
if isCore {
name = "core"
}
return convertToVinfos(name, body)
}
@@ -126,17 +132,17 @@ func detect(installed models.WpPackage, candidates []models.VulnInfo) (vulns []m
for _, fixstat := range v.WpPackageFixStats {
ok, err := match(installed.Version, fixstat.FixedIn)
if err != nil {
util.Log.Errorf("Failed to compare versions %s installed: %s, fixedIn: %s, v: %+v",
logging.Log.Warnf("Failed to compare versions %s installed: %s, fixedIn: %s, v: %+v",
installed.Name, installed.Version, fixstat.FixedIn, v)
// continue scanning
continue
}
if ok {
vulns = append(vulns, v)
util.Log.Debugf("Affected: %s installed: %s, fixedIn: %s",
logging.Log.Debugf("Affected: %s installed: %s, fixedIn: %s",
installed.Name, installed.Version, fixstat.FixedIn)
} else {
util.Log.Debugf("Not affected: %s : %s, fixedIn: %s",
logging.Log.Debugf("Not affected: %s : %s, fixedIn: %s",
installed.Name, installed.Version, fixstat.FixedIn)
}
}
@@ -196,10 +202,12 @@ func extractToVulnInfos(pkgName string, cves []WpCveInfo) (vinfos []models.VulnI
CveID: cveID,
CveContents: models.NewCveContents(
models.CveContent{
Type: models.WpScan,
CveID: cveID,
Title: vulnerability.Title,
References: refs,
Type: models.WpScan,
CveID: cveID,
Title: vulnerability.Title,
References: refs,
Published: vulnerability.CreatedAt,
LastModified: vulnerability.UpdatedAt,
},
),
VulnType: vulnerability.VulnType,
@@ -217,21 +225,27 @@ func extractToVulnInfos(pkgName string, cves []WpCveInfo) (vinfos []models.VulnI
}
func httpRequest(url, token string) (string, error) {
retry := 1
util.Log.Debugf("%s", url)
req, err := http.NewRequest("GET", url, nil)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
defer cancel()
if err != nil {
return "", err
return "", errof.New(errof.ErrFailedToAccessWpScan,
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
}
req.Header.Set("Authorization", fmt.Sprintf("Token token=%s", token))
loop:
resp, err := new(http.Client).Do(req)
client, err := util.GetHTTPClient(config.Conf.HTTPProxy)
if err != nil {
return "", err
}
body, err := ioutil.ReadAll(resp.Body)
resp, err := client.Do(req)
if err != nil {
return "", err
return "", errof.New(errof.ErrFailedToAccessWpScan,
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", errof.New(errof.ErrFailedToAccessWpScan,
fmt.Sprintf("Failed to access to wpscan.com. err: %s", err))
}
defer resp.Body.Close()
if resp.StatusCode == 200 {
@@ -239,14 +253,13 @@ loop:
} else if resp.StatusCode == 404 {
// This package is not in wpscan
return "", nil
} else if resp.StatusCode == 429 && retry <= 3 {
// 429 Too Many Requests
util.Log.Debugf("sleep %d min(s): %s", retry, resp.Status)
time.Sleep(time.Duration(retry) * time.Minute)
retry++
goto loop
} else if resp.StatusCode == 429 {
return "", errof.New(errof.ErrWpScanAPILimitExceeded,
fmt.Sprintf("wpscan.com API limit exceeded: %+v", resp.Status))
} else {
logging.Log.Warnf("wpscan.com unknown status code: %+v", resp.Status)
return "", nil
}
return "", err
}
func removeInactives(pkgs models.WordPressPackages) (removed models.WordPressPackages) {


@@ -1,4 +1,7 @@
package wordpress
//go:build !scanner
// +build !scanner
package detector
import (
"reflect"


@@ -19,6 +19,9 @@ var (
// ErrFailedToAccessWpScan is error of wpscan.com api access
ErrFailedToAccessWpScan ErrorCode = "ErrFailedToAccessWpScan"
// ErrWpScanAPILimitExceeded is error of wpscan.com api limit exceeded
ErrWpScanAPILimitExceeded ErrorCode = "ErrWpScanAPILimitExceeded"
)
// New :


@@ -1,85 +0,0 @@
// +build !scanner
package exploit
import (
"encoding/json"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
"github.com/vulsio/go-exploitdb/db"
exploitmodels "github.com/vulsio/go-exploitdb/models"
)
// FillWithExploit fills exploit information that has in Exploit
func FillWithExploit(driver db.DB, r *models.ScanResult, cnf *config.ExploitConf) (nExploitCve int, err error) {
if cnf.IsFetchViaHTTP() {
var cveIDs []string
for cveID := range r.ScannedCves {
cveIDs = append(cveIDs, cveID)
}
prefix, _ := util.URLPathJoin(cnf.URL, "cves")
responses, err := getCvesViaHTTP(cveIDs, prefix)
if err != nil {
return 0, err
}
for _, res := range responses {
exps := []*exploitmodels.Exploit{}
if err := json.Unmarshal([]byte(res.json), &exps); err != nil {
return 0, err
}
exploits := ConvertToModels(exps)
v, ok := r.ScannedCves[res.request.cveID]
if ok {
v.Exploits = exploits
}
r.ScannedCves[res.request.cveID] = v
nExploitCve++
}
} else {
if driver == nil {
return 0, nil
}
for cveID, vuln := range r.ScannedCves {
if cveID == "" {
continue
}
es := driver.GetExploitByCveID(cveID)
if len(es) == 0 {
continue
}
exploits := ConvertToModels(es)
vuln.Exploits = exploits
r.ScannedCves[cveID] = vuln
nExploitCve++
}
}
return nExploitCve, nil
}
// ConvertToModels converts gost model to vuls model
func ConvertToModels(es []*exploitmodels.Exploit) (exploits []models.Exploit) {
for _, e := range es {
var documentURL, shellURL *string
if e.OffensiveSecurity != nil {
os := e.OffensiveSecurity
if os.Document != nil {
documentURL = &os.Document.DocumentURL
}
if os.ShellCode != nil {
shellURL = &os.ShellCode.ShellCodeURL
}
}
exploit := models.Exploit{
ExploitType: e.ExploitType,
ID: e.ExploitUniqueID,
URL: e.URL,
Description: e.Description,
DocumentURL: documentURL,
ShellCodeURL: shellURL,
}
exploits = append(exploits, exploit)
}
return exploits
}


@@ -1,115 +0,0 @@
package exploit
import (
"net/http"
"time"
"github.com/cenkalti/backoff"
"github.com/future-architect/vuls/util"
"github.com/parnurzeal/gorequest"
"golang.org/x/xerrors"
)
type response struct {
request request
json string
}
func getCvesViaHTTP(cveIDs []string, urlPrefix string) (
responses []response, err error) {
nReq := len(cveIDs)
reqChan := make(chan request, nReq)
resChan := make(chan response, nReq)
errChan := make(chan error, nReq)
defer close(reqChan)
defer close(resChan)
defer close(errChan)
go func() {
for _, cveID := range cveIDs {
reqChan <- request{
cveID: cveID,
}
}
}()
concurrency := 10
tasks := util.GenWorkers(concurrency)
for i := 0; i < nReq; i++ {
tasks <- func() {
select {
case req := <-reqChan:
url, err := util.URLPathJoin(
urlPrefix,
req.cveID,
)
if err != nil {
errChan <- err
} else {
util.Log.Debugf("HTTP Request to %s", url)
httpGet(url, req, resChan, errChan)
}
}
}
}
timeout := time.After(2 * 60 * time.Second)
var errs []error
for i := 0; i < nReq; i++ {
select {
case res := <-resChan:
responses = append(responses, res)
case err := <-errChan:
errs = append(errs, err)
case <-timeout:
return nil, xerrors.New("Timeout Fetching OVAL")
}
}
if len(errs) != 0 {
return nil, xerrors.Errorf("Failed to fetch OVAL. err: %w", errs)
}
return
}
type request struct {
osMajorVersion string
packName string
isSrcPack bool
cveID string
}
func httpGet(url string, req request, resChan chan<- response, errChan chan<- error) {
var body string
var errs []error
var resp *http.Response
count, retryMax := 0, 3
f := func() (err error) {
// resp, body, errs = gorequest.New().SetDebug(config.Conf.Debug).Get(url).End()
resp, body, errs = gorequest.New().Get(url).End()
if 0 < len(errs) || resp == nil || resp.StatusCode != 200 {
count++
if count == retryMax {
return nil
}
return xerrors.Errorf("HTTP GET error, url: %s, resp: %v, err: %s", url, resp, errs)
}
return nil
}
notify := func(err error, t time.Duration) {
util.Log.Warnf("Failed to HTTP GET. retrying in %s seconds. err: %s", t, err)
}
err := backoff.RetryNotify(f, backoff.NewExponentialBackOff(), notify)
if err != nil {
errChan <- xerrors.Errorf("HTTP Error %w", err)
return
}
if count == retryMax {
errChan <- xerrors.New("Retry count exceeded")
return
}
resChan <- response{
request: req,
json: body,
}
}


@@ -1,197 +0,0 @@
package github
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"time"
"github.com/future-architect/vuls/config"
"github.com/future-architect/vuls/errof"
"github.com/future-architect/vuls/models"
"golang.org/x/oauth2"
)
// DetectGitHubSecurityAlerts access to owner/repo on GitHub and fetch security alerts of the repository via GitHub API v4 GraphQL and then set to the given ScanResult.
// https://help.github.com/articles/about-security-alerts-for-vulnerable-dependencies/
//TODO move to report
func DetectGitHubSecurityAlerts(r *models.ScanResult, owner, repo, token string) (nCVEs int, err error) {
src := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},
)
httpClient := oauth2.NewClient(context.Background(), src)
// TODO Use `https://github.com/shurcooL/githubv4` if the tool supports vulnerabilityAlerts Endpoint
// Memo : https://developer.github.com/v4/explorer/
const jsonfmt = `{"query":
"query { repository(owner:\"%s\", name:\"%s\") { url vulnerabilityAlerts(first: %d, %s) { pageInfo { endCursor hasNextPage startCursor } edges { node { id dismissReason dismissedAt securityVulnerability{ package { name ecosystem } severity vulnerableVersionRange firstPatchedVersion { identifier } } securityAdvisory { description ghsaId permalink publishedAt summary updatedAt withdrawnAt origin severity references { url } identifiers { type value } } } } } } } "}`
after := ""
for {
jsonStr := fmt.Sprintf(jsonfmt, owner, repo, 100, after)
req, err := http.NewRequest("POST",
"https://api.github.com/graphql",
bytes.NewBuffer([]byte(jsonStr)),
)
if err != nil {
return 0, err
}
// https://developer.github.com/v4/previews/#repository-vulnerability-alerts
// To toggle this preview and access data, need to provide a custom media type in the Accept header:
// MEMO: I tried to get the affected version via GitHub API. But it seems difficult to determine the affected version if there are multiple dependency files such as package.json.
// TODO remove this header if it is no longer preview status in the future.
req.Header.Set("Accept", "application/vnd.github.package-deletes-preview+json")
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
return 0, err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return 0, err
}
alerts := SecurityAlerts{}
if err := json.Unmarshal(body, &alerts); err != nil {
return 0, err
}
// util.Log.Debugf("%s", pp.Sprint(alerts))
// util.Log.Debugf("%s", string(body))
if alerts.Data.Repository.URL == "" {
return 0, errof.New(errof.ErrFailedToAccessGithubAPI,
fmt.Sprintf("Failed to access to GitHub API. Response: %s", string(body)))
}
for _, v := range alerts.Data.Repository.VulnerabilityAlerts.Edges {
if config.Conf.IgnoreGitHubDismissed && v.Node.DismissReason != "" {
continue
}
pkgName := fmt.Sprintf("%s %s",
alerts.Data.Repository.URL, v.Node.SecurityVulnerability.Package.Name)
m := models.GitHubSecurityAlert{
PackageName: pkgName,
FixedIn: v.Node.SecurityVulnerability.FirstPatchedVersion.Identifier,
AffectedRange: v.Node.SecurityVulnerability.VulnerableVersionRange,
Dismissed: len(v.Node.DismissReason) != 0,
DismissedAt: v.Node.DismissedAt,
DismissReason: v.Node.DismissReason,
}
cveIDs, other := []string{}, []string{}
for _, identifier := range v.Node.SecurityAdvisory.Identifiers {
if identifier.Type == "CVE" {
cveIDs = append(cveIDs, identifier.Value)
} else {
other = append(other, identifier.Value)
}
}
// If CVE-ID has not been assigned, use the GHSA ID etc as a ID.
if len(cveIDs) == 0 {
cveIDs = other
}
refs := []models.Reference{}
for _, r := range v.Node.SecurityAdvisory.References {
refs = append(refs, models.Reference{Link: r.URL})
}
for _, cveID := range cveIDs {
cveContent := models.CveContent{
Type: models.GitHub,
CveID: cveID,
Title: v.Node.SecurityAdvisory.Summary,
Summary: v.Node.SecurityAdvisory.Description,
Cvss2Severity: v.Node.SecurityVulnerability.Severity,
Cvss3Severity: v.Node.SecurityVulnerability.Severity,
SourceLink: v.Node.SecurityAdvisory.Permalink,
References: refs,
Published: v.Node.SecurityAdvisory.PublishedAt,
LastModified: v.Node.SecurityAdvisory.UpdatedAt,
}
if val, ok := r.ScannedCves[cveID]; ok {
val.GitHubSecurityAlerts = val.GitHubSecurityAlerts.Add(m)
val.CveContents[models.GitHub] = cveContent
r.ScannedCves[cveID] = val
} else {
v := models.VulnInfo{
CveID: cveID,
Confidences: models.Confidences{models.GitHubMatch},
GitHubSecurityAlerts: models.GitHubSecurityAlerts{m},
CveContents: models.NewCveContents(cveContent),
}
r.ScannedCves[cveID] = v
}
nCVEs++
}
}
if !alerts.Data.Repository.VulnerabilityAlerts.PageInfo.HasNextPage {
break
}
after = fmt.Sprintf(`after: \"%s\"`, alerts.Data.Repository.VulnerabilityAlerts.PageInfo.EndCursor)
}
return nCVEs, err
}
//SecurityAlerts has detected CVE-IDs, PackageNames, Refs
type SecurityAlerts struct {
Data struct {
Repository struct {
URL string `json:"url"`
VulnerabilityAlerts struct {
PageInfo struct {
EndCursor string `json:"endCursor"`
HasNextPage bool `json:"hasNextPage"`
StartCursor string `json:"startCursor"`
} `json:"pageInfo"`
Edges []struct {
Node struct {
ID string `json:"id"`
DismissReason string `json:"dismissReason"`
DismissedAt time.Time `json:"dismissedAt"`
SecurityVulnerability struct {
Package struct {
Name string `json:"name"`
Ecosystem string `json:"ecosystem"`
} `json:"package"`
Severity string `json:"severity"`
VulnerableVersionRange string `json:"vulnerableVersionRange"`
FirstPatchedVersion struct {
Identifier string `json:"identifier"`
} `json:"firstPatchedVersion"`
} `json:"securityVulnerability"`
SecurityAdvisory struct {
Description string `json:"description"`
GhsaID string `json:"ghsaId"`
Permalink string `json:"permalink"`
PublishedAt time.Time `json:"publishedAt"`
Summary string `json:"summary"`
UpdatedAt time.Time `json:"updatedAt"`
WithdrawnAt time.Time `json:"withdrawnAt"`
Origin string `json:"origin"`
Severity string `json:"severity"`
References []struct {
URL string `json:"url"`
} `json:"references"`
Identifiers []struct {
Type string `json:"type"`
Value string `json:"value"`
} `json:"identifiers"`
} `json:"securityAdvisory"`
} `json:"node"`
} `json:"edges"`
} `json:"vulnerabilityAlerts"`
} `json:"repository"`
} `json:"data"`
}

197
go.mod

@@ -1,50 +1,189 @@
module github.com/future-architect/vuls
go 1.15
go 1.20
require (
github.com/Azure/azure-sdk-for-go v50.2.0+incompatible
github.com/BurntSushi/toml v0.3.1
github.com/aquasecurity/fanal v0.0.0-20210119051230-28c249da7cfd
github.com/aquasecurity/trivy v0.15.0
github.com/aquasecurity/trivy-db v0.0.0-20210121143430-2a5c54036a86
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
github.com/aws/aws-sdk-go v1.36.31
github.com/boltdb/bolt v1.3.1
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible
github.com/BurntSushi/toml v1.3.2
github.com/CycloneDX/cyclonedx-go v0.7.1
github.com/Ullaakut/nmap/v2 v2.2.2
github.com/aquasecurity/go-dep-parser v0.0.0-20221114145626-35ef808901e8
github.com/aquasecurity/trivy v0.35.0
github.com/aquasecurity/trivy-db v0.0.0-20220627104749-930461748b63
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
github.com/aws/aws-sdk-go v1.44.300
github.com/c-robinson/iplib v1.0.6
github.com/cenkalti/backoff v2.2.1+incompatible
github.com/d4l3k/messagediff v1.2.2-0.20190829033028-7e0a312ae40b
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
github.com/emersion/go-smtp v0.14.0
github.com/emersion/go-smtp v0.16.0
github.com/google/go-cmp v0.5.9
github.com/google/subcommands v1.2.0
github.com/google/uuid v1.3.0
github.com/gosnmp/gosnmp v1.35.0
github.com/gosuri/uitable v0.0.4
github.com/hashicorp/go-uuid v1.0.2
github.com/hashicorp/go-version v1.2.1
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c
github.com/hashicorp/go-uuid v1.0.3
github.com/hashicorp/go-version v1.6.0
github.com/jesseduffield/gocui v0.3.0
github.com/k0kubun/pp v3.0.1+incompatible
github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f
github.com/knqyf263/go-cpe v0.0.0-20201213041631-54f6ab28673f
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
github.com/knqyf263/go-rpm-version v0.0.0-20170716094938-74609b86c936
github.com/knqyf263/gost v0.1.7
github.com/kotakanbe/go-cve-dictionary v0.5.7
github.com/knqyf263/go-rpm-version v0.0.0-20220614171824-631e686d1075
github.com/kotakanbe/go-pingscanner v0.1.0
github.com/kotakanbe/goval-dictionary v0.3.1
github.com/kotakanbe/logrus-prefixed-formatter v0.0.0-20180123152602-928f7356cb96
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mitchellh/go-homedir v1.1.0
github.com/nlopes/slack v0.6.0
github.com/nsf/termbox-go v0.0.0-20200418040025-38ba6e5628f1 // indirect
github.com/olekukonko/tablewriter v0.0.4
github.com/olekukonko/tablewriter v0.0.5
github.com/package-url/packageurl-go v0.1.1-0.20220203205134-d70459300c8a
github.com/parnurzeal/gorequest v0.2.16
github.com/pkg/errors v0.9.1
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
github.com/sirupsen/logrus v1.7.0
github.com/spf13/afero v1.5.1
github.com/spf13/cobra v1.1.1
github.com/takuzoo3868/go-msfdb v0.1.3
github.com/vulsio/go-exploitdb v0.1.4
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
golang.org/x/oauth2 v0.0.0-20210125201302-af13f521f196
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
k8s.io/utils v0.0.0-20210111153108-fddb29f9d009
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.7.0
github.com/vulsio/go-cti v0.0.3
github.com/vulsio/go-cve-dictionary v0.8.4
github.com/vulsio/go-exploitdb v0.4.5
github.com/vulsio/go-kev v0.1.2
github.com/vulsio/go-msfdb v0.2.2
github.com/vulsio/gost v0.4.4
github.com/vulsio/goval-dictionary v0.9.2
go.etcd.io/bbolt v1.3.7
golang.org/x/exp v0.0.0-20230425010034-47ecfdc1ba53
golang.org/x/oauth2 v0.8.0
golang.org/x/sync v0.2.0
golang.org/x/text v0.9.0
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2
)
require (
cloud.google.com/go v0.107.0 // indirect
cloud.google.com/go/compute v1.15.1 // indirect
cloud.google.com/go/compute/metadata v0.2.3 // indirect
cloud.google.com/go/iam v0.8.0 // indirect
cloud.google.com/go/storage v1.27.0 // indirect
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
github.com/Azure/go-autorest/autorest v0.11.28 // indirect
github.com/Azure/go-autorest/autorest/adal v0.9.21 // indirect
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
github.com/Azure/go-autorest/autorest/to v0.3.0 // indirect
github.com/Azure/go-autorest/logger v0.2.1 // indirect
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/PuerkitoBio/goquery v1.8.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce // indirect
github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 // indirect
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
github.com/briandowns/spinner v1.23.0 // indirect
github.com/caarlos0/env/v6 v6.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cheggaaa/pb/v3 v3.1.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dnaeon/go-vcr v1.2.0 // indirect
github.com/docker/cli v20.10.20+incompatible // indirect
github.com/docker/distribution v2.8.2+incompatible // indirect
github.com/docker/docker v23.0.4+incompatible // indirect
github.com/docker/docker-credential-helpers v0.7.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.15.0 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/glebarez/go-sqlite v1.21.1 // indirect
github.com/glebarez/sqlite v1.8.1-0.20230417114740-1accfe103bf2 // indirect
github.com/go-redis/redis/v8 v8.11.5 // indirect
github.com/go-sql-driver/mysql v1.7.1 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/gofrs/uuid v4.0.0+incompatible // indirect
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-containerregistry v0.12.0 // indirect
github.com/google/licenseclassifier/v2 v2.0.0-pre6 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.2.1 // indirect
github.com/googleapis/gax-go/v2 v2.7.0 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/gorilla/websocket v1.4.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-getter v1.7.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
github.com/hashicorp/go-safetemp v1.0.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/log15 v3.0.0-testing.5+incompatible // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.3.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.15.11 // indirect
github.com/liamg/jfather v0.0.7 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 // indirect
github.com/masahiro331/go-xfs-filesystem v0.0.0-20221127135739-051c25f1becd // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.18 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/nsf/termbox-go v1.1.1 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.0-rc2 // indirect
github.com/pelletier/go-toml/v2 v2.0.7 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/rogpeppe/go-internal v1.8.1 // indirect
github.com/samber/lo v1.33.0 // indirect
github.com/sergi/go-diff v1.3.1 // indirect
github.com/smartystreets/assertions v1.13.0 // indirect
github.com/spdx/tools-golang v0.3.0 // indirect
github.com/spf13/afero v1.9.5 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/viper v1.15.0 // indirect
github.com/stretchr/objx v0.5.0 // indirect
github.com/stretchr/testify v1.8.2 // indirect
github.com/subosito/gotenv v1.4.2 // indirect
github.com/ulikunitz/xz v0.5.11 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
go.opencensus.io v0.24.0 // indirect
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/goleak v1.1.12 // indirect
go.uber.org/multierr v1.8.0 // indirect
go.uber.org/zap v1.23.0 // indirect
golang.org/x/crypto v0.9.0 // indirect
golang.org/x/mod v0.10.0 // indirect
golang.org/x/net v0.10.0 // indirect
golang.org/x/sys v0.8.0 // indirect
golang.org/x/term v0.8.0 // indirect
golang.org/x/tools v0.9.1 // indirect
google.golang.org/api v0.107.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect
google.golang.org/grpc v1.53.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/mysql v1.5.0 // indirect
gorm.io/driver/postgres v1.5.0 // indirect
gorm.io/gorm v1.25.0 // indirect
k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed // indirect
modernc.org/libc v1.22.6 // indirect
modernc.org/mathutil v1.5.0 // indirect
modernc.org/memory v1.5.0 // indirect
modernc.org/sqlite v1.22.1 // indirect
moul.io/http2curl v1.0.0 // indirect
)

1758
go.sum

File diff suppressed because it is too large


@@ -1,17 +0,0 @@
// +build !scanner
package gost
import (
"github.com/future-architect/vuls/models"
"github.com/knqyf263/gost/db"
)
// Base is a base struct
type Base struct {
}
// FillCVEsWithRedHat fills cve information that has in Gost
func (b Base) FillCVEsWithRedHat(driver db.DB, r *models.ScanResult) error {
return RedHat{}.fillCvesWithRedHatAPI(driver, r)
}


@@ -1,15 +1,22 @@
//go:build !scanner
// +build !scanner
package gost
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/future-architect/vuls/config"
debver "github.com/knqyf263/go-deb-version"
"golang.org/x/exp/maps"
"golang.org/x/xerrors"
"github.com/future-architect/vuls/logging"
"github.com/future-architect/vuls/models"
"github.com/future-architect/vuls/util"
"github.com/knqyf263/gost/db"
gostmodels "github.com/knqyf263/gost/models"
gostmodels "github.com/vulsio/gost/models"
)
// Debian is Gost client for Debian GNU/Linux
@@ -17,154 +24,249 @@ type Debian struct {
Base
}
type packCves struct {
packName string
isSrcPack bool
cves []models.CveContent
}
func (deb Debian) supported(major string) bool {
_, ok := map[string]string{
"7": "wheezy",
"8": "jessie",
"9": "stretch",
"10": "buster",
"11": "bullseye",
"12": "bookworm",
// "13": "trixie",
// "14": "forky",
}[major]
return ok
}
// DetectUnfixed fills cve information that has in Gost
func (deb Debian) DetectUnfixed(driver db.DB, r *models.ScanResult, _ bool) (nCVEs int, err error) {
// DetectCVEs fills CVE information that Gost has
func (deb Debian) DetectCVEs(r *models.ScanResult, _ bool) (nCVEs int, err error) {
if !deb.supported(major(r.Release)) {
// only logging
util.Log.Warnf("Debian %s is not supported yet", r.Release)
logging.Log.Warnf("Debian %s is not supported yet", r.Release)
return 0, nil
}
linuxImage := "linux-image-" + r.RunningKernel.Release
// Add linux and set the version of running kernel to search OVAL.
if r.Container.ContainerID == "" {
newVer := ""
if p, ok := r.Packages[linuxImage]; ok {
newVer = p.NewVersion
}
r.Packages["linux"] = models.Package{
Name: "linux",
Version: r.RunningKernel.Version,
NewVersion: newVer,
if r.RunningKernel.Release == "" {
logging.Log.Warnf("Since the exact kernel release is not available, the vulnerability in the kernel package is not detected.")
}
}
// Debian Security Tracker does not support Package for Raspbian, so skip it.
var scanResult models.ScanResult
if r.Family != config.Raspbian {
scanResult = *r
} else {
scanResult = r.RemoveRaspbianPackFromResult()
fixedCVEs, err := deb.detectCVEsWithFixState(r, true)
if err != nil {
return 0, xerrors.Errorf("Failed to detect fixed CVEs. err: %w", err)
}
packCvesList := []packCves{}
if config.Conf.Gost.IsFetchViaHTTP() {
url, _ := util.URLPathJoin(config.Conf.Gost.URL, "debian", major(scanResult.Release), "pkgs")
responses, err := getAllUnfixedCvesViaHTTP(r, url)
unfixedCVEs, err := deb.detectCVEsWithFixState(r, false)
if err != nil {
return 0, xerrors.Errorf("Failed to detect unfixed CVEs. err: %w", err)
}
return len(unique(append(fixedCVEs, unfixedCVEs...))), nil
}
func (deb Debian) detectCVEsWithFixState(r *models.ScanResult, fixed bool) ([]string, error) {
detects := map[string]cveContent{}
if deb.driver == nil {
urlPrefix, err := util.URLPathJoin(deb.baseURL, "debian", major(r.Release), "pkgs")
if err != nil {
return 0, err
return nil, xerrors.Errorf("Failed to join URLPath. err: %w", err)
}
s := "fixed-cves"
if !fixed {
s = "unfixed-cves"
}
responses, err := getCvesWithFixStateViaHTTP(r, urlPrefix, s)
if err != nil {
return nil, xerrors.Errorf("Failed to get CVEs via HTTP. err: %w", err)
}
for _, res := range responses {
debCves := map[string]gostmodels.DebianCVE{}
if err := json.Unmarshal([]byte(res.json), &debCves); err != nil {
return 0, err
}
cves := []models.CveContent{}
for _, debcve := range debCves {
cves = append(cves, *deb.ConvertToModel(&debcve))
}
packCvesList = append(packCvesList, packCves{
packName: res.request.packName,
isSrcPack: res.request.isSrcPack,
cves: cves,
})
}
} else {
if driver == nil {
return 0, nil
}
for _, pack := range scanResult.Packages {
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
cves := []models.CveContent{}
for _, cveDeb := range cveDebs {
cves = append(cves, *deb.ConvertToModel(&cveDeb))
}
packCvesList = append(packCvesList, packCves{
packName: pack.Name,
isSrcPack: false,
cves: cves,
})
}
// SrcPack
for _, pack := range scanResult.SrcPackages {
cveDebs := driver.GetUnfixedCvesDebian(major(scanResult.Release), pack.Name)
cves := []models.CveContent{}
for _, cveDeb := range cveDebs {
cves = append(cves, *deb.ConvertToModel(&cveDeb))
}
packCvesList = append(packCvesList, packCves{
packName: pack.Name,
isSrcPack: true,
cves: cves,
})
}
}
delete(r.Packages, "linux")
for _, p := range packCvesList {
for _, cve := range p.cves {
v, ok := r.ScannedCves[cve.CveID]
if ok {
if v.CveContents == nil {
v.CveContents = models.NewCveContents(cve)
} else {
v.CveContents[models.DebianSecurityTracker] = cve
}
} else {
v = models.VulnInfo{
CveID: cve.CveID,
CveContents: models.NewCveContents(cve),
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
}
nCVEs++
if !res.request.isSrcPack {
continue
}
names := []string{}
if p.isSrcPack {
if srcPack, ok := r.SrcPackages[p.packName]; ok {
for _, binName := range srcPack.BinaryNames {
if _, ok := r.Packages[binName]; ok {
names = append(names, binName)
}
n := strings.NewReplacer("linux-signed", "linux", "linux-latest", "linux", "-amd64", "", "-arm64", "", "-i386", "").Replace(res.request.packName)
if deb.isKernelSourcePackage(n) {
isRunning := false
for _, bn := range r.SrcPackages[res.request.packName].BinaryNames {
if bn == fmt.Sprintf("linux-image-%s", r.RunningKernel.Release) {
isRunning = true
break
}
}
} else {
if p.packName == "linux" {
names = append(names, linuxImage)
} else {
names = append(names, p.packName)
// To detect vulnerabilities in running kernels only, skip if the kernel is not running.
if !isRunning {
continue
}
}
for _, name := range names {
v.AffectedPackages = v.AffectedPackages.Store(models.PackageFixStatus{
Name: name,
FixState: "open",
NotFixedYet: true,
})
cs := map[string]gostmodels.DebianCVE{}
if err := json.Unmarshal([]byte(res.json), &cs); err != nil {
return nil, xerrors.Errorf("Failed to unmarshal json. err: %w", err)
}
for _, content := range deb.detect(cs, models.SrcPackage{Name: res.request.packName, Version: r.SrcPackages[res.request.packName].Version, BinaryNames: r.SrcPackages[res.request.packName].BinaryNames}, models.Kernel{Release: r.RunningKernel.Release, Version: r.Packages[fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)].Version}) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
}
} else {
for _, p := range r.SrcPackages {
n := strings.NewReplacer("linux-signed", "linux", "linux-latest", "linux", "-amd64", "", "-arm64", "", "-i386", "").Replace(p.Name)
if deb.isKernelSourcePackage(n) {
isRunning := false
for _, bn := range p.BinaryNames {
if bn == fmt.Sprintf("linux-image-%s", r.RunningKernel.Release) {
isRunning = true
break
}
}
// To detect vulnerabilities in running kernels only, skip if the kernel is not running.
if !isRunning {
continue
}
}
var f func(string, string) (map[string]gostmodels.DebianCVE, error) = deb.driver.GetFixedCvesDebian
if !fixed {
f = deb.driver.GetUnfixedCvesDebian
}
cs, err := f(major(r.Release), n)
if err != nil {
return nil, xerrors.Errorf("Failed to get CVEs. release: %s, src package: %s, err: %w", major(r.Release), p.Name, err)
}
for _, content := range deb.detect(cs, p, models.Kernel{Release: r.RunningKernel.Release, Version: r.Packages[fmt.Sprintf("linux-image-%s", r.RunningKernel.Release)].Version}) {
c, ok := detects[content.cveContent.CveID]
if ok {
content.fixStatuses = append(content.fixStatuses, c.fixStatuses...)
}
detects[content.cveContent.CveID] = content
}
r.ScannedCves[cve.CveID] = v
}
}
return nCVEs, nil
for _, content := range detects {
v, ok := r.ScannedCves[content.cveContent.CveID]
if ok {
if v.CveContents == nil {
v.CveContents = models.NewCveContents(content.cveContent)
} else {
v.CveContents[models.DebianSecurityTracker] = []models.CveContent{content.cveContent}
}
v.Confidences.AppendIfMissing(models.DebianSecurityTrackerMatch)
} else {
v = models.VulnInfo{
CveID: content.cveContent.CveID,
CveContents: models.NewCveContents(content.cveContent),
Confidences: models.Confidences{models.DebianSecurityTrackerMatch},
}
}
for _, s := range content.fixStatuses {
v.AffectedPackages = v.AffectedPackages.Store(s)
}
r.ScannedCves[content.cveContent.CveID] = v
}
return maps.Keys(detects), nil
}
func (deb Debian) isKernelSourcePackage(pkgname string) bool {
switch ss := strings.Split(pkgname, "-"); len(ss) {
case 1:
return pkgname == "linux"
case 2:
if ss[0] != "linux" {
return false
}
switch ss[1] {
case "grsec":
return true
default:
_, err := strconv.ParseFloat(ss[1], 64)
return err == nil
}
default:
return false
}
}
func (deb Debian) detect(cves map[string]gostmodels.DebianCVE, srcPkg models.SrcPackage, runningKernel models.Kernel) []cveContent {
n := strings.NewReplacer("linux-signed", "linux", "linux-latest", "linux", "-amd64", "", "-arm64", "", "-i386", "").Replace(srcPkg.Name)
var contents []cveContent
for _, cve := range cves {
c := cveContent{
cveContent: *(Debian{}).ConvertToModel(&cve),
}
for _, p := range cve.Package {
for _, r := range p.Release {
switch r.Status {
case "open", "undetermined":
for _, bn := range srcPkg.BinaryNames {
if deb.isKernelSourcePackage(n) && bn != fmt.Sprintf("linux-image-%s", runningKernel.Release) {
continue
}
c.fixStatuses = append(c.fixStatuses, models.PackageFixStatus{
Name: bn,
FixState: r.Status,
NotFixedYet: true,
})
}
case "resolved":
installedVersion := srcPkg.Version
patchedVersion := r.FixedVersion
if deb.isKernelSourcePackage(n) {
installedVersion = runningKernel.Version
}
affected, err := deb.isGostDefAffected(installedVersion, patchedVersion)
if err != nil {
logging.Log.Debugf("Failed to parse versions: %s, Ver: %s, Gost: %s", err, installedVersion, patchedVersion)
continue
}
if affected {
for _, bn := range srcPkg.BinaryNames {
if deb.isKernelSourcePackage(n) && bn != fmt.Sprintf("linux-image-%s", runningKernel.Release) {
continue
}
c.fixStatuses = append(c.fixStatuses, models.PackageFixStatus{
Name: bn,
FixedIn: patchedVersion,
})
}
}
default:
logging.Log.Debugf("Failed to check vulnerable CVE. err: unknown status: %s", r.Status)
}
}
}
if len(c.fixStatuses) > 0 {
contents = append(contents, c)
}
}
return contents
}
func (deb Debian) isGostDefAffected(versionRelease, gostVersion string) (affected bool, err error) {
vera, err := debver.NewVersion(versionRelease)
if err != nil {
return false, xerrors.Errorf("Failed to parse version. version: %s, err: %w", versionRelease, err)
}
verb, err := debver.NewVersion(gostVersion)
if err != nil {
return false, xerrors.Errorf("Failed to parse version. version: %s, err: %w", gostVersion, err)
}
return vera.LessThan(verb), nil
}
// ConvertToModel converts gost model to vuls model
@@ -176,15 +278,17 @@ func (deb Debian) ConvertToModel(cve *gostmodels.DebianCVE) *models.CveContent {
break
}
}
var optional map[string]string
if cve.Scope != "" {
optional = map[string]string{"attack range": cve.Scope}
}
return &models.CveContent{
Type: models.DebianSecurityTracker,
CveID: cve.CveID,
Summary: cve.Description,
Cvss2Severity: severity,
Cvss3Severity: severity,
SourceLink: "https://security-tracker.debian.org/tracker/" + cve.CveID,
Optional: map[string]string{
"attack range": cve.Scope,
},
SourceLink: fmt.Sprintf("https://security-tracker.debian.org/tracker/%s", cve.CveID),
Optional: optional,
}
}
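For reference, a self-contained sketch of the version comparison that isGostDefAffected performs with go-deb-version; the version strings below are made-up examples, not values from this diff.

package main

import (
    "fmt"

    debver "github.com/knqyf263/go-deb-version"
)

func main() {
    installed, _ := debver.NewVersion("5.10.179-1") // hypothetical installed (or running-kernel) version
    fixed, _ := debver.NewVersion("5.10.191-1")     // hypothetical fixed version from the Security Tracker
    // The package is considered affected when the installed version sorts before the fixed one.
    fmt.Println(installed.LessThan(fixed)) // true
}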

Some files were not shown because too many files have changed in this diff.