mirror of
https://github.com/safedep/vet.git
synced 2025-12-11 01:01:10 -06:00
commit
bde7df3507
2
.github/workflows/.gitignore
vendored
Normal file
2
.github/workflows/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
|
||||||
|
dist/
|
||||||
67
.github/workflows/goreleaser.yml
vendored
Normal file
67
.github/workflows/goreleaser.yml
vendored
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
name: goreleaser
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "*" # triggers only if push new tag version, like `0.8.4` or else
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
goreleaser:
|
||||||
|
outputs:
|
||||||
|
hashes: ${{ steps.hash.outputs.hashes }}
|
||||||
|
permissions:
|
||||||
|
contents: write # for goreleaser/goreleaser-action to create a GitHub release
|
||||||
|
packages: write # for goreleaser/goreleaser-action to publish docker images
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
# Required for buildx on docker 19.x
|
||||||
|
DOCKER_CLI_EXPERIMENTAL: "enabled"
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2
|
||||||
|
- uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2
|
||||||
|
- name: Set up Go
|
||||||
|
uses: actions/setup-go@6edd4406fa81c3da01a34fa6f6343087c207a568 # v3.5.0
|
||||||
|
with:
|
||||||
|
go-version: 1.19
|
||||||
|
check-latest: true
|
||||||
|
- name: ghcr-login
|
||||||
|
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # v1
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.repository_owner }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Run GoReleaser
|
||||||
|
id: run-goreleaser
|
||||||
|
uses: goreleaser/goreleaser-action@8f67e590f2d095516493f017008adc464e63adb1 # v4.1.0
|
||||||
|
with:
|
||||||
|
version: latest
|
||||||
|
args: release --rm-dist
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Generate subject
|
||||||
|
id: hash
|
||||||
|
env:
|
||||||
|
ARTIFACTS: "${{ steps.run-goreleaser.outputs.artifacts }}"
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
checksum_file=$(echo "$ARTIFACTS" | jq -r '.[] | select (.type=="Checksum") | .path')
|
||||||
|
echo "hashes=$(cat $checksum_file | base64 -w0)" >> "$GITHUB_OUTPUT"
|
||||||
|
provenance:
|
||||||
|
needs: [goreleaser]
|
||||||
|
permissions:
|
||||||
|
actions: read # To read the workflow path.
|
||||||
|
id-token: write # To sign the provenance.
|
||||||
|
contents: write # To add assets to a release.
|
||||||
|
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.4.0
|
||||||
|
with:
|
||||||
|
base64-subjects: "${{ needs.goreleaser.outputs.hashes }}"
|
||||||
|
upload-assets: true
|
||||||
|
private-repository: true
|
||||||
|
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@ -13,4 +13,7 @@
|
|||||||
|
|
||||||
# Dependency directories (remove the comment below to include it)
|
# Dependency directories (remove the comment below to include it)
|
||||||
# vendor/
|
# vendor/
|
||||||
|
|
||||||
/vet
|
/vet
|
||||||
|
|
||||||
|
dist/
|
||||||
|
|||||||
43
.goreleaser.yaml
Normal file
43
.goreleaser.yaml
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# This is an example .goreleaser.yml file with some sensible defaults.
|
||||||
|
# Make sure to check the documentation at https://goreleaser.com
|
||||||
|
before:
|
||||||
|
hooks:
|
||||||
|
- go mod tidy
|
||||||
|
- go generate ./...
|
||||||
|
builds:
|
||||||
|
- env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
goos:
|
||||||
|
- linux
|
||||||
|
- darwin
|
||||||
|
goarch:
|
||||||
|
- amd64
|
||||||
|
- arm64
|
||||||
|
|
||||||
|
archives:
|
||||||
|
- format: tar.gz
|
||||||
|
name_template: >-
|
||||||
|
{{ .ProjectName }}_
|
||||||
|
{{- title .Os }}_
|
||||||
|
{{- if eq .Arch "amd64" }}x86_64
|
||||||
|
{{- else if eq .Arch "386" }}i386
|
||||||
|
{{- else }}{{ .Arch }}{{ end }}
|
||||||
|
{{- if .Arm }}v{{ .Arm }}{{ end }}
|
||||||
|
format_overrides:
|
||||||
|
- goos: windows
|
||||||
|
format: zip
|
||||||
|
checksum:
|
||||||
|
name_template: 'checksums.txt'
|
||||||
|
snapshot:
|
||||||
|
name_template: "{{ incpatch .Version }}-next"
|
||||||
|
changelog:
|
||||||
|
sort: asc
|
||||||
|
filters:
|
||||||
|
exclude:
|
||||||
|
- '^docs:'
|
||||||
|
- '^test:'
|
||||||
|
|
||||||
|
# The lines beneath this are called `modelines`. See `:help modeline`
|
||||||
|
# Feel free to remove those if you don't want/use them.
|
||||||
|
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
|
||||||
|
# vim: set ts=2 sw=2 tw=0 fo=cnqoj
|
||||||
35
Makefile
Normal file
35
Makefile
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
SHELL := /bin/bash
|
||||||
|
GITCOMMIT := $(shell git rev-parse HEAD)
|
||||||
|
VERSION := "$(shell git describe --tags --abbrev=0)-$(shell git rev-parse --short HEAD)"
|
||||||
|
|
||||||
|
all: clean setup vet
|
||||||
|
|
||||||
|
oapi-codegen-install:
|
||||||
|
go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@v1.10.1
|
||||||
|
|
||||||
|
oapi-codegen:
|
||||||
|
oapi-codegen -package insightapi -generate types ./api/insights-v1.yml > ./gen/insightapi/insights.types.go
|
||||||
|
oapi-codegen -package insightapi -generate client ./api/insights-v1.yml > ./gen/insightapi/insights.client.go
|
||||||
|
|
||||||
|
setup:
|
||||||
|
mkdir -p out gen/insightapi
|
||||||
|
|
||||||
|
GO_CFLAGS=-X main.commit=$(GITCOMMIT) -X main.version=$(VERSION)
|
||||||
|
GO_LDFLAGS=-ldflags "-w $(GO_CFLAGS)"
|
||||||
|
|
||||||
|
vet: oapi-codegen
|
||||||
|
go build ${GO_LDFLAGS}
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
test:
|
||||||
|
go test ./...
|
||||||
|
|
||||||
|
.PHONY: clean
|
||||||
|
clean:
|
||||||
|
-rm -rf out
|
||||||
|
-rm -rf gen
|
||||||
|
|
||||||
|
gosec:
|
||||||
|
-docker run --rm -it -w /app/ -v `pwd`:/app/ securego/gosec \
|
||||||
|
-exclude-dir=/app/gen -exclude-dir=/app/spec \
|
||||||
|
/app/...
|
||||||
44
README.md
44
README.md
@ -1,2 +1,42 @@
|
|||||||
# vet
|
# vet : The dependency vetting tool
|
||||||
Tool for identifying software supply chain risks using Insights API
|
Tool for identifying software supply chain risks
|
||||||
|
|
||||||
|
## TL;DR
|
||||||
|
|
||||||
|
Build this repository
|
||||||
|
|
||||||
|
> Ensure `$(go env GOPATH)/bin` is in your `$PATH`
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make oapi-codegen-install && make
|
||||||
|
```
|
||||||
|
|
||||||
|
Alternatively install using
|
||||||
|
|
||||||
|
```bash
|
||||||
|
go install github.com/safedep/vet@latest
|
||||||
|
```
|
||||||
|
|
||||||
|
Configure `vet` to use API Key to access [Insights API](#)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
vet auth configure
|
||||||
|
```
|
||||||
|
|
||||||
|
> Alternatively pass the API key as environment to skip configuration
|
||||||
|
|
||||||
|
Run `vet` to identify risks
|
||||||
|
|
||||||
|
```bash
|
||||||
|
vet scan
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
Insights API Key can be passed at runtime using environment variable
|
||||||
|
|
||||||
|
```bash
|
||||||
|
VET_INSIGHTS_API_KEY=... vet scan
|
||||||
|
```
|
||||||
|
|||||||
780
api/insights-v1.yml
Normal file
780
api/insights-v1.yml
Normal file
@ -0,0 +1,780 @@
|
|||||||
|
openapi: 3.0.2
|
||||||
|
info:
|
||||||
|
title: SafeDep OSS Insights API
|
||||||
|
contact:
|
||||||
|
name: SafeDep API
|
||||||
|
url: 'https://safedep.io'
|
||||||
|
description: |
|
||||||
|
The Insights API expose various metadata about OSS artifacts. Clients can
|
||||||
|
query this API to gather the data required for rich policy decision making
|
||||||
|
for various use-cases.
|
||||||
|
version: 1.0.0
|
||||||
|
servers:
|
||||||
|
- url: 'https://{apiHost}/{apiBase}'
|
||||||
|
variables:
|
||||||
|
apiHost:
|
||||||
|
default: api.safedep.io
|
||||||
|
apiBase:
|
||||||
|
default: insights/v1
|
||||||
|
tags:
|
||||||
|
- name: Package Meta Data
|
||||||
|
description: Package meta data related operations
|
||||||
|
- name: Infrastructure
|
||||||
|
description: Infrastructure support operations
|
||||||
|
paths:
|
||||||
|
/healthz:
|
||||||
|
get:
|
||||||
|
description: Get health check status
|
||||||
|
operationId: getHealthCheckStatus
|
||||||
|
tags:
|
||||||
|
- Infrastructure
|
||||||
|
security: []
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Server is operational
|
||||||
|
'/{ecosystem}/packages/{name}/versions/{version}':
|
||||||
|
get:
|
||||||
|
description: Get metadata for a package version
|
||||||
|
operationId: getPackageVersionInsight
|
||||||
|
tags:
|
||||||
|
- Package Meta Data
|
||||||
|
security:
|
||||||
|
- api_key: []
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Successful response
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/PackageVersionInsight'
|
||||||
|
'404':
|
||||||
|
description: Requested resource was not found
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/ApiError'
|
||||||
|
'429':
|
||||||
|
description: Rate limit block
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/ApiError'
|
||||||
|
'500':
|
||||||
|
description: Failed due to internal server error
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/ApiError'
|
||||||
|
parameters:
|
||||||
|
- name: ecosystem
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Case insensitive ecosystem name
|
||||||
|
enum:
|
||||||
|
- Maven
|
||||||
|
- RubyGems
|
||||||
|
- Go
|
||||||
|
- npm
|
||||||
|
- PyPI
|
||||||
|
- Cargo
|
||||||
|
- NuGet
|
||||||
|
- Linux
|
||||||
|
- Debian
|
||||||
|
- Github Actions
|
||||||
|
- name: name
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: version
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
components:
|
||||||
|
securitySchemes:
|
||||||
|
api_key:
|
||||||
|
type: apiKey
|
||||||
|
name: Authorization
|
||||||
|
in: header
|
||||||
|
schemas:
|
||||||
|
ApiError:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
message:
|
||||||
|
type: string
|
||||||
|
description: A descriptive message about the error meant for developer consumption
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
description: An optional service or domain specific error group
|
||||||
|
code:
|
||||||
|
type: string
|
||||||
|
description: An error code identifying the error
|
||||||
|
params:
|
||||||
|
type: object
|
||||||
|
description: Optional error specific attributes
|
||||||
|
additionalProperties:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
key:
|
||||||
|
type: string
|
||||||
|
value:
|
||||||
|
type: string
|
||||||
|
PackageVersion:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- ecosystem
|
||||||
|
- name
|
||||||
|
- version
|
||||||
|
properties:
|
||||||
|
ecosystem:
|
||||||
|
type: string
|
||||||
|
description: The ecosystem where this package belongs to
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: The name of the package
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
description: The version of the package
|
||||||
|
License:
|
||||||
|
type: string
|
||||||
|
description: License SPDX code
|
||||||
|
enum:
|
||||||
|
- 0BSD
|
||||||
|
- AAL
|
||||||
|
- Abstyles
|
||||||
|
- Adobe-2006
|
||||||
|
- Adobe-Glyph
|
||||||
|
- ADSL
|
||||||
|
- AFL-1.1
|
||||||
|
- AFL-1.2
|
||||||
|
- AFL-2.0
|
||||||
|
- AFL-2.1
|
||||||
|
- AFL-3.0
|
||||||
|
- Afmparse
|
||||||
|
- AGPL-1.0
|
||||||
|
- AGPL-1.0-only
|
||||||
|
- AGPL-1.0-or-later
|
||||||
|
- AGPL-3.0
|
||||||
|
- AGPL-3.0-only
|
||||||
|
- AGPL-3.0-or-later
|
||||||
|
- Aladdin
|
||||||
|
- AMDPLPA
|
||||||
|
- AML
|
||||||
|
- AMPAS
|
||||||
|
- ANTLR-PD
|
||||||
|
- ANTLR-PD-fallback
|
||||||
|
- Apache-1.0
|
||||||
|
- Apache-1.1
|
||||||
|
- Apache-2.0
|
||||||
|
- APAFML
|
||||||
|
- APL-1.0
|
||||||
|
- App-s2p
|
||||||
|
- APSL-1.0
|
||||||
|
- APSL-1.1
|
||||||
|
- APSL-1.2
|
||||||
|
- APSL-2.0
|
||||||
|
- Arphic-1999
|
||||||
|
- Artistic-1.0
|
||||||
|
- Artistic-1.0-cl8
|
||||||
|
- Artistic-1.0-Perl
|
||||||
|
- Artistic-2.0
|
||||||
|
- Baekmuk
|
||||||
|
- Bahyph
|
||||||
|
- Barr
|
||||||
|
- Beerware
|
||||||
|
- Bitstream-Vera
|
||||||
|
- BitTorrent-1.0
|
||||||
|
- BitTorrent-1.1
|
||||||
|
- blessing
|
||||||
|
- BlueOak-1.0.0
|
||||||
|
- Borceux
|
||||||
|
- BSD-1-Clause
|
||||||
|
- BSD-2-Clause
|
||||||
|
- BSD-2-Clause-FreeBSD
|
||||||
|
- BSD-2-Clause-NetBSD
|
||||||
|
- BSD-2-Clause-Patent
|
||||||
|
- BSD-2-Clause-Views
|
||||||
|
- BSD-3-Clause
|
||||||
|
- BSD-3-Clause-Attribution
|
||||||
|
- BSD-3-Clause-Clear
|
||||||
|
- BSD-3-Clause-LBNL
|
||||||
|
- BSD-3-Clause-Modification
|
||||||
|
- BSD-3-Clause-No-Military-License
|
||||||
|
- BSD-3-Clause-No-Nuclear-License
|
||||||
|
- BSD-3-Clause-No-Nuclear-License-2014
|
||||||
|
- BSD-3-Clause-No-Nuclear-Warranty
|
||||||
|
- BSD-3-Clause-Open-MPI
|
||||||
|
- BSD-4-Clause
|
||||||
|
- BSD-4-Clause-Shortened
|
||||||
|
- BSD-4-Clause-UC
|
||||||
|
- BSD-Protection
|
||||||
|
- BSD-Source-Code
|
||||||
|
- BSL-1.0
|
||||||
|
- BUSL-1.1
|
||||||
|
- bzip2-1.0.5
|
||||||
|
- bzip2-1.0.6
|
||||||
|
- C-UDA-1.0
|
||||||
|
- CAL-1.0
|
||||||
|
- CAL-1.0-Combined-Work-Exception
|
||||||
|
- Caldera
|
||||||
|
- CATOSL-1.1
|
||||||
|
- CC-BY-1.0
|
||||||
|
- CC-BY-2.0
|
||||||
|
- CC-BY-2.5
|
||||||
|
- CC-BY-2.5-AU
|
||||||
|
- CC-BY-3.0
|
||||||
|
- CC-BY-3.0-AT
|
||||||
|
- CC-BY-3.0-DE
|
||||||
|
- CC-BY-3.0-IGO
|
||||||
|
- CC-BY-3.0-NL
|
||||||
|
- CC-BY-3.0-US
|
||||||
|
- CC-BY-4.0
|
||||||
|
- CC-BY-NC-1.0
|
||||||
|
- CC-BY-NC-2.0
|
||||||
|
- CC-BY-NC-2.5
|
||||||
|
- CC-BY-NC-3.0
|
||||||
|
- CC-BY-NC-3.0-DE
|
||||||
|
- CC-BY-NC-4.0
|
||||||
|
- CC-BY-NC-ND-1.0
|
||||||
|
- CC-BY-NC-ND-2.0
|
||||||
|
- CC-BY-NC-ND-2.5
|
||||||
|
- CC-BY-NC-ND-3.0
|
||||||
|
- CC-BY-NC-ND-3.0-DE
|
||||||
|
- CC-BY-NC-ND-3.0-IGO
|
||||||
|
- CC-BY-NC-ND-4.0
|
||||||
|
- CC-BY-NC-SA-1.0
|
||||||
|
- CC-BY-NC-SA-2.0
|
||||||
|
- CC-BY-NC-SA-2.0-FR
|
||||||
|
- CC-BY-NC-SA-2.0-UK
|
||||||
|
- CC-BY-NC-SA-2.5
|
||||||
|
- CC-BY-NC-SA-3.0
|
||||||
|
- CC-BY-NC-SA-3.0-DE
|
||||||
|
- CC-BY-NC-SA-3.0-IGO
|
||||||
|
- CC-BY-NC-SA-4.0
|
||||||
|
- CC-BY-ND-1.0
|
||||||
|
- CC-BY-ND-2.0
|
||||||
|
- CC-BY-ND-2.5
|
||||||
|
- CC-BY-ND-3.0
|
||||||
|
- CC-BY-ND-3.0-DE
|
||||||
|
- CC-BY-ND-4.0
|
||||||
|
- CC-BY-SA-1.0
|
||||||
|
- CC-BY-SA-2.0
|
||||||
|
- CC-BY-SA-2.0-UK
|
||||||
|
- CC-BY-SA-2.1-JP
|
||||||
|
- CC-BY-SA-2.5
|
||||||
|
- CC-BY-SA-3.0
|
||||||
|
- CC-BY-SA-3.0-AT
|
||||||
|
- CC-BY-SA-3.0-DE
|
||||||
|
- CC-BY-SA-4.0
|
||||||
|
- CC-PDDC
|
||||||
|
- CC0-1.0
|
||||||
|
- CDDL-1.0
|
||||||
|
- CDDL-1.1
|
||||||
|
- CDL-1.0
|
||||||
|
- CDLA-Permissive-1.0
|
||||||
|
- CDLA-Permissive-2.0
|
||||||
|
- CDLA-Sharing-1.0
|
||||||
|
- CECILL-1.0
|
||||||
|
- CECILL-1.1
|
||||||
|
- CECILL-2.0
|
||||||
|
- CECILL-2.1
|
||||||
|
- CECILL-B
|
||||||
|
- CECILL-C
|
||||||
|
- CERN-OHL-1.1
|
||||||
|
- CERN-OHL-1.2
|
||||||
|
- CERN-OHL-P-2.0
|
||||||
|
- CERN-OHL-S-2.0
|
||||||
|
- CERN-OHL-W-2.0
|
||||||
|
- checkmk
|
||||||
|
- ClArtistic
|
||||||
|
- CNRI-Jython
|
||||||
|
- CNRI-Python
|
||||||
|
- CNRI-Python-GPL-Compatible
|
||||||
|
- COIL-1.0
|
||||||
|
- Community-Spec-1.0
|
||||||
|
- Condor-1.1
|
||||||
|
- copyleft-next-0.3.0
|
||||||
|
- copyleft-next-0.3.1
|
||||||
|
- CPAL-1.0
|
||||||
|
- CPL-1.0
|
||||||
|
- CPOL-1.02
|
||||||
|
- Crossword
|
||||||
|
- CrystalStacker
|
||||||
|
- CUA-OPL-1.0
|
||||||
|
- Cube
|
||||||
|
- curl
|
||||||
|
- D-FSL-1.0
|
||||||
|
- diffmark
|
||||||
|
- DL-DE-BY-2.0
|
||||||
|
- DOC
|
||||||
|
- Dotseqn
|
||||||
|
- DRL-1.0
|
||||||
|
- DSDP
|
||||||
|
- dvipdfm
|
||||||
|
- ECL-1.0
|
||||||
|
- ECL-2.0
|
||||||
|
- eCos-2.0
|
||||||
|
- EFL-1.0
|
||||||
|
- EFL-2.0
|
||||||
|
- eGenix
|
||||||
|
- Elastic-2.0
|
||||||
|
- Entessa
|
||||||
|
- EPICS
|
||||||
|
- EPL-1.0
|
||||||
|
- EPL-2.0
|
||||||
|
- ErlPL-1.1
|
||||||
|
- etalab-2.0
|
||||||
|
- EUDatagrid
|
||||||
|
- EUPL-1.0
|
||||||
|
- EUPL-1.1
|
||||||
|
- EUPL-1.2
|
||||||
|
- Eurosym
|
||||||
|
- Fair
|
||||||
|
- FDK-AAC
|
||||||
|
- Frameworx-1.0
|
||||||
|
- FreeBSD-DOC
|
||||||
|
- FreeImage
|
||||||
|
- FSFAP
|
||||||
|
- FSFUL
|
||||||
|
- FSFULLR
|
||||||
|
- FSFULLRWD
|
||||||
|
- FTL
|
||||||
|
- GD
|
||||||
|
- GFDL-1.1
|
||||||
|
- GFDL-1.1-invariants-only
|
||||||
|
- GFDL-1.1-invariants-or-later
|
||||||
|
- GFDL-1.1-no-invariants-only
|
||||||
|
- GFDL-1.1-no-invariants-or-later
|
||||||
|
- GFDL-1.1-only
|
||||||
|
- GFDL-1.1-or-later
|
||||||
|
- GFDL-1.2
|
||||||
|
- GFDL-1.2-invariants-only
|
||||||
|
- GFDL-1.2-invariants-or-later
|
||||||
|
- GFDL-1.2-no-invariants-only
|
||||||
|
- GFDL-1.2-no-invariants-or-later
|
||||||
|
- GFDL-1.2-only
|
||||||
|
- GFDL-1.2-or-later
|
||||||
|
- GFDL-1.3
|
||||||
|
- GFDL-1.3-invariants-only
|
||||||
|
- GFDL-1.3-invariants-or-later
|
||||||
|
- GFDL-1.3-no-invariants-only
|
||||||
|
- GFDL-1.3-no-invariants-or-later
|
||||||
|
- GFDL-1.3-only
|
||||||
|
- GFDL-1.3-or-later
|
||||||
|
- Giftware
|
||||||
|
- GL2PS
|
||||||
|
- Glide
|
||||||
|
- Glulxe
|
||||||
|
- GLWTPL
|
||||||
|
- gnuplot
|
||||||
|
- GPL-1.0
|
||||||
|
- GPL-1.0+
|
||||||
|
- GPL-1.0-only
|
||||||
|
- GPL-1.0-or-later
|
||||||
|
- GPL-2.0
|
||||||
|
- GPL-2.0+
|
||||||
|
- GPL-2.0-only
|
||||||
|
- GPL-2.0-or-later
|
||||||
|
- GPL-2.0-with-autoconf-exception
|
||||||
|
- GPL-2.0-with-bison-exception
|
||||||
|
- GPL-2.0-with-classpath-exception
|
||||||
|
- GPL-2.0-with-font-exception
|
||||||
|
- GPL-2.0-with-GCC-exception
|
||||||
|
- GPL-3.0
|
||||||
|
- GPL-3.0+
|
||||||
|
- GPL-3.0-only
|
||||||
|
- GPL-3.0-or-later
|
||||||
|
- GPL-3.0-with-autoconf-exception
|
||||||
|
- GPL-3.0-with-GCC-exception
|
||||||
|
- gSOAP-1.3b
|
||||||
|
- HaskellReport
|
||||||
|
- Hippocratic-2.1
|
||||||
|
- HPND
|
||||||
|
- HPND-sell-variant
|
||||||
|
- HTMLTIDY
|
||||||
|
- IBM-pibs
|
||||||
|
- ICU
|
||||||
|
- IJG
|
||||||
|
- ImageMagick
|
||||||
|
- iMatix
|
||||||
|
- Imlib2
|
||||||
|
- Info-ZIP
|
||||||
|
- Intel
|
||||||
|
- Intel-ACPI
|
||||||
|
- Interbase-1.0
|
||||||
|
- IPA
|
||||||
|
- IPL-1.0
|
||||||
|
- ISC
|
||||||
|
- Jam
|
||||||
|
- JasPer-2.0
|
||||||
|
- JPNIC
|
||||||
|
- JSON
|
||||||
|
- Knuth-CTAN
|
||||||
|
- LAL-1.2
|
||||||
|
- LAL-1.3
|
||||||
|
- Latex2e
|
||||||
|
- Leptonica
|
||||||
|
- LGPL-2.0
|
||||||
|
- LGPL-2.0+
|
||||||
|
- LGPL-2.0-only
|
||||||
|
- LGPL-2.0-or-later
|
||||||
|
- LGPL-2.1
|
||||||
|
- LGPL-2.1+
|
||||||
|
- LGPL-2.1-only
|
||||||
|
- LGPL-2.1-or-later
|
||||||
|
- LGPL-3.0
|
||||||
|
- LGPL-3.0+
|
||||||
|
- LGPL-3.0-only
|
||||||
|
- LGPL-3.0-or-later
|
||||||
|
- LGPLLR
|
||||||
|
- Libpng
|
||||||
|
- libpng-2.0
|
||||||
|
- libselinux-1.0
|
||||||
|
- libtiff
|
||||||
|
- libutil-David-Nugent
|
||||||
|
- LiLiQ-P-1.1
|
||||||
|
- LiLiQ-R-1.1
|
||||||
|
- LiLiQ-Rplus-1.1
|
||||||
|
- Linux-man-pages-copyleft
|
||||||
|
- Linux-OpenIB
|
||||||
|
- LOOP
|
||||||
|
- LPL-1.0
|
||||||
|
- LPL-1.02
|
||||||
|
- LPPL-1.0
|
||||||
|
- LPPL-1.1
|
||||||
|
- LPPL-1.2
|
||||||
|
- LPPL-1.3a
|
||||||
|
- LPPL-1.3c
|
||||||
|
- LZMA-SDK-9.11-to-9.20
|
||||||
|
- LZMA-SDK-9.22
|
||||||
|
- MakeIndex
|
||||||
|
- Minpack
|
||||||
|
- MirOS
|
||||||
|
- MIT
|
||||||
|
- MIT-0
|
||||||
|
- MIT-advertising
|
||||||
|
- MIT-CMU
|
||||||
|
- MIT-enna
|
||||||
|
- MIT-feh
|
||||||
|
- MIT-Modern-Variant
|
||||||
|
- MIT-open-group
|
||||||
|
- MITNFA
|
||||||
|
- Motosoto
|
||||||
|
- mpi-permissive
|
||||||
|
- mpich2
|
||||||
|
- MPL-1.0
|
||||||
|
- MPL-1.1
|
||||||
|
- MPL-2.0
|
||||||
|
- MPL-2.0-no-copyleft-exception
|
||||||
|
- mplus
|
||||||
|
- MS-LPL
|
||||||
|
- MS-PL
|
||||||
|
- MS-RL
|
||||||
|
- MTLL
|
||||||
|
- MulanPSL-1.0
|
||||||
|
- MulanPSL-2.0
|
||||||
|
- Multics
|
||||||
|
- Mup
|
||||||
|
- NAIST-2003
|
||||||
|
- NASA-1.3
|
||||||
|
- Naumen
|
||||||
|
- NBPL-1.0
|
||||||
|
- NCGL-UK-2.0
|
||||||
|
- NCSA
|
||||||
|
- Net-SNMP
|
||||||
|
- NetCDF
|
||||||
|
- Newsletr
|
||||||
|
- NGPL
|
||||||
|
- NICTA-1.0
|
||||||
|
- NIST-PD
|
||||||
|
- NIST-PD-fallback
|
||||||
|
- NLOD-1.0
|
||||||
|
- NLOD-2.0
|
||||||
|
- NLPL
|
||||||
|
- Nokia
|
||||||
|
- NOSL
|
||||||
|
- Noweb
|
||||||
|
- NPL-1.0
|
||||||
|
- NPL-1.1
|
||||||
|
- NPOSL-3.0
|
||||||
|
- NRL
|
||||||
|
- NTP
|
||||||
|
- NTP-0
|
||||||
|
- Nunit
|
||||||
|
- O-UDA-1.0
|
||||||
|
- OCCT-PL
|
||||||
|
- OCLC-2.0
|
||||||
|
- ODbL-1.0
|
||||||
|
- ODC-By-1.0
|
||||||
|
- OFL-1.0
|
||||||
|
- OFL-1.0-no-RFN
|
||||||
|
- OFL-1.0-RFN
|
||||||
|
- OFL-1.1
|
||||||
|
- OFL-1.1-no-RFN
|
||||||
|
- OFL-1.1-RFN
|
||||||
|
- OGC-1.0
|
||||||
|
- OGDL-Taiwan-1.0
|
||||||
|
- OGL-Canada-2.0
|
||||||
|
- OGL-UK-1.0
|
||||||
|
- OGL-UK-2.0
|
||||||
|
- OGL-UK-3.0
|
||||||
|
- OGTSL
|
||||||
|
- OLDAP-1.1
|
||||||
|
- OLDAP-1.2
|
||||||
|
- OLDAP-1.3
|
||||||
|
- OLDAP-1.4
|
||||||
|
- OLDAP-2.0
|
||||||
|
- OLDAP-2.0.1
|
||||||
|
- OLDAP-2.1
|
||||||
|
- OLDAP-2.2
|
||||||
|
- OLDAP-2.2.1
|
||||||
|
- OLDAP-2.2.2
|
||||||
|
- OLDAP-2.3
|
||||||
|
- OLDAP-2.4
|
||||||
|
- OLDAP-2.5
|
||||||
|
- OLDAP-2.6
|
||||||
|
- OLDAP-2.7
|
||||||
|
- OLDAP-2.8
|
||||||
|
- OML
|
||||||
|
- OpenSSL
|
||||||
|
- OPL-1.0
|
||||||
|
- OPUBL-1.0
|
||||||
|
- OSET-PL-2.1
|
||||||
|
- OSL-1.0
|
||||||
|
- OSL-1.1
|
||||||
|
- OSL-2.0
|
||||||
|
- OSL-2.1
|
||||||
|
- OSL-3.0
|
||||||
|
- Parity-6.0.0
|
||||||
|
- Parity-7.0.0
|
||||||
|
- PDDL-1.0
|
||||||
|
- PHP-3.0
|
||||||
|
- PHP-3.01
|
||||||
|
- Plexus
|
||||||
|
- PolyForm-Noncommercial-1.0.0
|
||||||
|
- PolyForm-Small-Business-1.0.0
|
||||||
|
- PostgreSQL
|
||||||
|
- PSF-2.0
|
||||||
|
- psfrag
|
||||||
|
- psutils
|
||||||
|
- Python-2.0
|
||||||
|
- Python-2.0.1
|
||||||
|
- Qhull
|
||||||
|
- QPL-1.0
|
||||||
|
- Rdisc
|
||||||
|
- RHeCos-1.1
|
||||||
|
- RPL-1.1
|
||||||
|
- RPL-1.5
|
||||||
|
- RPSL-1.0
|
||||||
|
- RSA-MD
|
||||||
|
- RSCPL
|
||||||
|
- Ruby
|
||||||
|
- SAX-PD
|
||||||
|
- Saxpath
|
||||||
|
- SCEA
|
||||||
|
- SchemeReport
|
||||||
|
- Sendmail
|
||||||
|
- Sendmail-8.23
|
||||||
|
- SGI-B-1.0
|
||||||
|
- SGI-B-1.1
|
||||||
|
- SGI-B-2.0
|
||||||
|
- SHL-0.5
|
||||||
|
- SHL-0.51
|
||||||
|
- SimPL-2.0
|
||||||
|
- SISSL
|
||||||
|
- SISSL-1.2
|
||||||
|
- Sleepycat
|
||||||
|
- SMLNJ
|
||||||
|
- SMPPL
|
||||||
|
- SNIA
|
||||||
|
- Spencer-86
|
||||||
|
- Spencer-94
|
||||||
|
- Spencer-99
|
||||||
|
- SPL-1.0
|
||||||
|
- SSH-OpenSSH
|
||||||
|
- SSH-short
|
||||||
|
- SSPL-1.0
|
||||||
|
- StandardML-NJ
|
||||||
|
- SugarCRM-1.1.3
|
||||||
|
- SWL
|
||||||
|
- Symlinks
|
||||||
|
- TAPR-OHL-1.0
|
||||||
|
- TCL
|
||||||
|
- TCP-wrappers
|
||||||
|
- TMate
|
||||||
|
- TORQUE-1.1
|
||||||
|
- TOSL
|
||||||
|
- TU-Berlin-1.0
|
||||||
|
- TU-Berlin-2.0
|
||||||
|
- UCL-1.0
|
||||||
|
- Unicode-DFS-2015
|
||||||
|
- Unicode-DFS-2016
|
||||||
|
- Unicode-TOU
|
||||||
|
- Unlicense
|
||||||
|
- UPL-1.0
|
||||||
|
- Vim
|
||||||
|
- VOSTROM
|
||||||
|
- VSL-1.0
|
||||||
|
- W3C
|
||||||
|
- W3C-19980720
|
||||||
|
- W3C-20150513
|
||||||
|
- Watcom-1.0
|
||||||
|
- Wsuipa
|
||||||
|
- WTFPL
|
||||||
|
- wxWindows
|
||||||
|
- X11
|
||||||
|
- X11-distribute-modifications-variant
|
||||||
|
- Xerox
|
||||||
|
- XFree86-1.1
|
||||||
|
- xinetd
|
||||||
|
- Xnet
|
||||||
|
- xpp
|
||||||
|
- XSkat
|
||||||
|
- YPL-1.0
|
||||||
|
- YPL-1.1
|
||||||
|
- Zed
|
||||||
|
- Zend-2.0
|
||||||
|
- Zimbra-1.3
|
||||||
|
- Zimbra-1.4
|
||||||
|
- Zlib
|
||||||
|
- zlib-acknowledgement
|
||||||
|
- ZPL-1.1
|
||||||
|
- ZPL-2.0
|
||||||
|
- ZPL-2.1
|
||||||
|
ScorecardContentV2Version:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
commit:
|
||||||
|
type: string
|
||||||
|
ScorecardContentV2Repository:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
commit:
|
||||||
|
type: string
|
||||||
|
description: Commit SHA where the scorecard checks where executed
|
||||||
|
ScorecardV2Check:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
enum:
|
||||||
|
- CII-Best-Practices
|
||||||
|
- Fuzzing
|
||||||
|
- Pinned-Dependencies
|
||||||
|
- CI-Tests
|
||||||
|
- Maintained
|
||||||
|
- Packaging
|
||||||
|
- SAST
|
||||||
|
- Dependency-Update-Tool
|
||||||
|
- Token-Permissions
|
||||||
|
- Security-Policy
|
||||||
|
- Signed-Releases
|
||||||
|
- Binary-Artifacts
|
||||||
|
- Branch-Protection
|
||||||
|
- Code-Review
|
||||||
|
- Contributors
|
||||||
|
- Vulnerabilities
|
||||||
|
- Dangerous-Workflow
|
||||||
|
- License
|
||||||
|
- Webhooks
|
||||||
|
score:
|
||||||
|
type: number
|
||||||
|
reason:
|
||||||
|
type: string
|
||||||
|
details:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
ScorecardContentV2:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
date:
|
||||||
|
type: string
|
||||||
|
example: '2010-01-01'
|
||||||
|
format: date
|
||||||
|
repository:
|
||||||
|
$ref: '#/components/schemas/ScorecardContentV2Repository'
|
||||||
|
scorecard:
|
||||||
|
$ref: '#/components/schemas/ScorecardContentV2Version'
|
||||||
|
score:
|
||||||
|
type: number
|
||||||
|
checks:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/ScorecardV2Check'
|
||||||
|
Scorecard:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
enum:
|
||||||
|
- V2
|
||||||
|
content:
|
||||||
|
$ref: '#/components/schemas/ScorecardContentV2'
|
||||||
|
PackageDependency:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
package_version:
|
||||||
|
$ref: '#/components/schemas/PackageVersion'
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
licenses:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/License'
|
||||||
|
distance:
|
||||||
|
type: integer
|
||||||
|
PackageDependents:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
total_dependents:
|
||||||
|
type: integer
|
||||||
|
direct_dependents:
|
||||||
|
type: integer
|
||||||
|
indirect_dependents:
|
||||||
|
type: integer
|
||||||
|
PackageProjectInfo:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
display_name:
|
||||||
|
type: string
|
||||||
|
issues:
|
||||||
|
type: integer
|
||||||
|
forks:
|
||||||
|
type: integer
|
||||||
|
stars:
|
||||||
|
type: integer
|
||||||
|
link:
|
||||||
|
type: string
|
||||||
|
PackageVersionInsight:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
package_version:
|
||||||
|
$ref: '#/components/schemas/PackageVersion'
|
||||||
|
projects:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/PackageProjectInfo'
|
||||||
|
licenses:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/License'
|
||||||
|
dependents:
|
||||||
|
$ref: '#/components/schemas/PackageDependents'
|
||||||
|
dependencies:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/PackageDependency'
|
||||||
|
scorecard:
|
||||||
|
$ref: '#/components/schemas/Scorecard'
|
||||||
22
auth.go
22
auth.go
@ -3,8 +3,12 @@ package main
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
|
"syscall"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
"golang.org/x/term"
|
||||||
|
|
||||||
|
"github.com/safedep/vet/internal/auth"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@ -13,7 +17,8 @@ var (
|
|||||||
|
|
||||||
func newAuthCommand() *cobra.Command {
|
func newAuthCommand() *cobra.Command {
|
||||||
cmd := &cobra.Command{
|
cmd := &cobra.Command{
|
||||||
Use: "auth",
|
Use: "auth",
|
||||||
|
Short: "Configure and verify Insights API authentication",
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
fmt.Printf("You must choose an appropriate command: configure, verify\n")
|
fmt.Printf("You must choose an appropriate command: configure, verify\n")
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
@ -31,7 +36,20 @@ func configureAuthCommand() *cobra.Command {
|
|||||||
cmd := &cobra.Command{
|
cmd := &cobra.Command{
|
||||||
Use: "configure",
|
Use: "configure",
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
// Run auth.Configure()
|
fmt.Print("Enter API Key: ")
|
||||||
|
key, err := term.ReadPassword(syscall.Stdin)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = auth.Configure(auth.Config{
|
||||||
|
ApiUrl: authInsightApiBaseUrl,
|
||||||
|
ApiKey: string(key),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
|
|||||||
372
gen/insightapi/insights.client.go
Normal file
372
gen/insightapi/insights.client.go
Normal file
@ -0,0 +1,372 @@
|
|||||||
|
// Package insightapi provides primitives to interact with the openapi HTTP API.
|
||||||
|
//
|
||||||
|
// Code generated by github.com/deepmap/oapi-codegen version v1.10.1 DO NOT EDIT.
|
||||||
|
package insightapi
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/deepmap/oapi-codegen/pkg/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RequestEditorFn is the function signature for the RequestEditor callback function
|
||||||
|
type RequestEditorFn func(ctx context.Context, req *http.Request) error
|
||||||
|
|
||||||
|
// Doer performs HTTP requests.
|
||||||
|
//
|
||||||
|
// The standard http.Client implements this interface.
|
||||||
|
type HttpRequestDoer interface {
|
||||||
|
Do(req *http.Request) (*http.Response, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Client which conforms to the OpenAPI3 specification for this service.
|
||||||
|
type Client struct {
|
||||||
|
// The endpoint of the server conforming to this interface, with scheme,
|
||||||
|
// https://api.deepmap.com for example. This can contain a path relative
|
||||||
|
// to the server, such as https://api.deepmap.com/dev-test, and all the
|
||||||
|
// paths in the swagger spec will be appended to the server.
|
||||||
|
Server string
|
||||||
|
|
||||||
|
// Doer for performing requests, typically a *http.Client with any
|
||||||
|
// customized settings, such as certificate chains.
|
||||||
|
Client HttpRequestDoer
|
||||||
|
|
||||||
|
// A list of callbacks for modifying requests which are generated before sending over
|
||||||
|
// the network.
|
||||||
|
RequestEditors []RequestEditorFn
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClientOption allows setting custom parameters during construction
|
||||||
|
type ClientOption func(*Client) error
|
||||||
|
|
||||||
|
// Creates a new Client, with reasonable defaults
|
||||||
|
func NewClient(server string, opts ...ClientOption) (*Client, error) {
|
||||||
|
// create a client with sane default values
|
||||||
|
client := Client{
|
||||||
|
Server: server,
|
||||||
|
}
|
||||||
|
// mutate client and add all optional params
|
||||||
|
for _, o := range opts {
|
||||||
|
if err := o(&client); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// ensure the server URL always has a trailing slash
|
||||||
|
if !strings.HasSuffix(client.Server, "/") {
|
||||||
|
client.Server += "/"
|
||||||
|
}
|
||||||
|
// create httpClient, if not already present
|
||||||
|
if client.Client == nil {
|
||||||
|
client.Client = &http.Client{}
|
||||||
|
}
|
||||||
|
return &client, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithHTTPClient allows overriding the default Doer, which is
|
||||||
|
// automatically created using http.Client. This is useful for tests.
|
||||||
|
func WithHTTPClient(doer HttpRequestDoer) ClientOption {
|
||||||
|
return func(c *Client) error {
|
||||||
|
c.Client = doer
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithRequestEditorFn allows setting up a callback function, which will be
|
||||||
|
// called right before sending the request. This can be used to mutate the request.
|
||||||
|
func WithRequestEditorFn(fn RequestEditorFn) ClientOption {
|
||||||
|
return func(c *Client) error {
|
||||||
|
c.RequestEditors = append(c.RequestEditors, fn)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The interface specification for the client above.
|
||||||
|
type ClientInterface interface {
|
||||||
|
// GetHealthCheckStatus request
|
||||||
|
GetHealthCheckStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)
|
||||||
|
|
||||||
|
// GetPackageVersionInsight request
|
||||||
|
GetPackageVersionInsight(ctx context.Context, ecosystem string, name string, version string, reqEditors ...RequestEditorFn) (*http.Response, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Client) GetHealthCheckStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
|
||||||
|
req, err := NewGetHealthCheckStatusRequest(c.Server)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
if err := c.applyEditors(ctx, req, reqEditors); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return c.Client.Do(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Client) GetPackageVersionInsight(ctx context.Context, ecosystem string, name string, version string, reqEditors ...RequestEditorFn) (*http.Response, error) {
|
||||||
|
req, err := NewGetPackageVersionInsightRequest(c.Server, ecosystem, name, version)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
if err := c.applyEditors(ctx, req, reqEditors); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return c.Client.Do(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewGetHealthCheckStatusRequest generates requests for GetHealthCheckStatus
|
||||||
|
func NewGetHealthCheckStatusRequest(server string) (*http.Request, error) {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
serverURL, err := url.Parse(server)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
operationPath := fmt.Sprintf("/healthz")
|
||||||
|
if operationPath[0] == '/' {
|
||||||
|
operationPath = "." + operationPath
|
||||||
|
}
|
||||||
|
|
||||||
|
queryURL, err := serverURL.Parse(operationPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequest("GET", queryURL.String(), nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return req, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewGetPackageVersionInsightRequest generates requests for GetPackageVersionInsight
|
||||||
|
func NewGetPackageVersionInsightRequest(server string, ecosystem string, name string, version string) (*http.Request, error) {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
var pathParam0 string
|
||||||
|
|
||||||
|
pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ecosystem", runtime.ParamLocationPath, ecosystem)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var pathParam1 string
|
||||||
|
|
||||||
|
pathParam1, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var pathParam2 string
|
||||||
|
|
||||||
|
pathParam2, err = runtime.StyleParamWithLocation("simple", false, "version", runtime.ParamLocationPath, version)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
serverURL, err := url.Parse(server)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
operationPath := fmt.Sprintf("/%s/packages/%s/versions/%s", pathParam0, pathParam1, pathParam2)
|
||||||
|
if operationPath[0] == '/' {
|
||||||
|
operationPath = "." + operationPath
|
||||||
|
}
|
||||||
|
|
||||||
|
queryURL, err := serverURL.Parse(operationPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequest("GET", queryURL.String(), nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return req, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error {
|
||||||
|
for _, r := range c.RequestEditors {
|
||||||
|
if err := r(ctx, req); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, r := range additionalEditors {
|
||||||
|
if err := r(ctx, req); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClientWithResponses builds on ClientInterface to offer response payloads
|
||||||
|
type ClientWithResponses struct {
|
||||||
|
ClientInterface
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewClientWithResponses creates a new ClientWithResponses, which wraps
|
||||||
|
// Client with return type handling
|
||||||
|
func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) {
|
||||||
|
client, err := NewClient(server, opts...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &ClientWithResponses{client}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithBaseURL overrides the baseURL.
|
||||||
|
func WithBaseURL(baseURL string) ClientOption {
|
||||||
|
return func(c *Client) error {
|
||||||
|
newBaseURL, err := url.Parse(baseURL)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
c.Server = newBaseURL.String()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClientWithResponsesInterface is the interface specification for the client with responses above.
|
||||||
|
type ClientWithResponsesInterface interface {
|
||||||
|
// GetHealthCheckStatus request
|
||||||
|
GetHealthCheckStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetHealthCheckStatusResponse, error)
|
||||||
|
|
||||||
|
// GetPackageVersionInsight request
|
||||||
|
GetPackageVersionInsightWithResponse(ctx context.Context, ecosystem string, name string, version string, reqEditors ...RequestEditorFn) (*GetPackageVersionInsightResponse, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetHealthCheckStatusResponse struct {
|
||||||
|
Body []byte
|
||||||
|
HTTPResponse *http.Response
|
||||||
|
}
|
||||||
|
|
||||||
|
// Status returns HTTPResponse.Status
|
||||||
|
func (r GetHealthCheckStatusResponse) Status() string {
|
||||||
|
if r.HTTPResponse != nil {
|
||||||
|
return r.HTTPResponse.Status
|
||||||
|
}
|
||||||
|
return http.StatusText(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusCode returns HTTPResponse.StatusCode
|
||||||
|
func (r GetHealthCheckStatusResponse) StatusCode() int {
|
||||||
|
if r.HTTPResponse != nil {
|
||||||
|
return r.HTTPResponse.StatusCode
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetPackageVersionInsightResponse struct {
|
||||||
|
Body []byte
|
||||||
|
HTTPResponse *http.Response
|
||||||
|
JSON200 *PackageVersionInsight
|
||||||
|
JSON404 *ApiError
|
||||||
|
JSON429 *ApiError
|
||||||
|
JSON500 *ApiError
|
||||||
|
}
|
||||||
|
|
||||||
|
// Status returns HTTPResponse.Status
|
||||||
|
func (r GetPackageVersionInsightResponse) Status() string {
|
||||||
|
if r.HTTPResponse != nil {
|
||||||
|
return r.HTTPResponse.Status
|
||||||
|
}
|
||||||
|
return http.StatusText(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusCode returns HTTPResponse.StatusCode
|
||||||
|
func (r GetPackageVersionInsightResponse) StatusCode() int {
|
||||||
|
if r.HTTPResponse != nil {
|
||||||
|
return r.HTTPResponse.StatusCode
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetHealthCheckStatusWithResponse request returning *GetHealthCheckStatusResponse
|
||||||
|
func (c *ClientWithResponses) GetHealthCheckStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetHealthCheckStatusResponse, error) {
|
||||||
|
rsp, err := c.GetHealthCheckStatus(ctx, reqEditors...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ParseGetHealthCheckStatusResponse(rsp)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPackageVersionInsightWithResponse request returning *GetPackageVersionInsightResponse
|
||||||
|
func (c *ClientWithResponses) GetPackageVersionInsightWithResponse(ctx context.Context, ecosystem string, name string, version string, reqEditors ...RequestEditorFn) (*GetPackageVersionInsightResponse, error) {
|
||||||
|
rsp, err := c.GetPackageVersionInsight(ctx, ecosystem, name, version, reqEditors...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ParseGetPackageVersionInsightResponse(rsp)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseGetHealthCheckStatusResponse parses an HTTP response from a GetHealthCheckStatusWithResponse call
|
||||||
|
func ParseGetHealthCheckStatusResponse(rsp *http.Response) (*GetHealthCheckStatusResponse, error) {
|
||||||
|
bodyBytes, err := ioutil.ReadAll(rsp.Body)
|
||||||
|
defer func() { _ = rsp.Body.Close() }()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
response := &GetHealthCheckStatusResponse{
|
||||||
|
Body: bodyBytes,
|
||||||
|
HTTPResponse: rsp,
|
||||||
|
}
|
||||||
|
|
||||||
|
return response, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseGetPackageVersionInsightResponse parses an HTTP response from a GetPackageVersionInsightWithResponse call
|
||||||
|
func ParseGetPackageVersionInsightResponse(rsp *http.Response) (*GetPackageVersionInsightResponse, error) {
|
||||||
|
bodyBytes, err := ioutil.ReadAll(rsp.Body)
|
||||||
|
defer func() { _ = rsp.Body.Close() }()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
response := &GetPackageVersionInsightResponse{
|
||||||
|
Body: bodyBytes,
|
||||||
|
HTTPResponse: rsp,
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
|
||||||
|
var dest PackageVersionInsight
|
||||||
|
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
response.JSON200 = &dest
|
||||||
|
|
||||||
|
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404:
|
||||||
|
var dest ApiError
|
||||||
|
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
response.JSON404 = &dest
|
||||||
|
|
||||||
|
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 429:
|
||||||
|
var dest ApiError
|
||||||
|
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
response.JSON429 = &dest
|
||||||
|
|
||||||
|
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500:
|
||||||
|
var dest ApiError
|
||||||
|
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
response.JSON500 = &dest
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return response, nil
|
||||||
|
}
|
||||||
1252
gen/insightapi/insights.types.go
Normal file
1252
gen/insightapi/insights.types.go
Normal file
File diff suppressed because it is too large
Load Diff
20
go.mod
20
go.mod
@ -3,12 +3,30 @@ module github.com/safedep/vet
|
|||||||
go 1.18
|
go 1.18
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/deepmap/oapi-codegen v1.12.4
|
||||||
|
github.com/google/cel-go v0.13.0
|
||||||
|
github.com/google/osv-scanner v1.0.2
|
||||||
github.com/sirupsen/logrus v1.9.0
|
github.com/sirupsen/logrus v1.9.0
|
||||||
github.com/spf13/cobra v1.6.1
|
github.com/spf13/cobra v1.6.1
|
||||||
|
github.com/stretchr/testify v1.8.1
|
||||||
|
golang.org/x/term v0.3.0
|
||||||
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/BurntSushi/toml v1.2.1 // indirect
|
||||||
|
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 // indirect
|
||||||
|
github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/google/uuid v1.3.0 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
github.com/spf13/pflag v1.0.5 // indirect
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 // indirect
|
github.com/stoewer/go-strcase v1.2.0 // indirect
|
||||||
|
golang.org/x/mod v0.7.0 // indirect
|
||||||
|
golang.org/x/sys v0.3.0 // indirect
|
||||||
|
golang.org/x/text v0.5.0 // indirect
|
||||||
|
google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c // indirect
|
||||||
|
google.golang.org/protobuf v1.28.1 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
51
go.sum
51
go.sum
@ -1,9 +1,29 @@
|
|||||||
|
github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
|
||||||
|
github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||||
|
github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
|
||||||
|
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves=
|
||||||
|
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
|
||||||
|
github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ=
|
||||||
|
github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk=
|
||||||
|
github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/deepmap/oapi-codegen v1.12.4 h1:pPmn6qI9MuOtCz82WY2Xaw46EQjgvxednXXrP7g5Q2s=
|
||||||
|
github.com/deepmap/oapi-codegen v1.12.4/go.mod h1:3lgHGMu6myQ2vqbbTXH2H1o4eXFTGnFiDaOaKKl5yas=
|
||||||
|
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||||
|
github.com/google/cel-go v0.13.0 h1:z+8OBOcmh7IeKyqwT/6IlnMvy621fYUqnTVPEdegGlU=
|
||||||
|
github.com/google/cel-go v0.13.0/go.mod h1:K2hpQgEjDp18J76a2DKFRlPBPpgRZgi6EbnpDgIhJ8s=
|
||||||
|
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
|
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||||
|
github.com/google/osv-scanner v1.0.2 h1:EiDbP8XQhEvo9I7WZMvA7OkJinyOULhNTD7SITS2tBY=
|
||||||
|
github.com/google/osv-scanner v1.0.2/go.mod h1:KTYFW64rATMvw7MtWAVXxIkG7u0R86n6VUKM8pzOzF0=
|
||||||
|
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||||
|
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
||||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||||
|
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
@ -13,12 +33,39 @@ github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
|
|||||||
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
|
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
|
github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0=
|
||||||
|
github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU=
|
||||||
|
github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 h1:0A+M6Uqn+Eje4kHMK80dtF3JCXC4ykBgQG4Fe06QRhQ=
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
|
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
|
||||||
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
|
||||||
|
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
|
||||||
|
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
|
||||||
|
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
|
||||||
|
golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM=
|
||||||
|
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c h1:QgY/XxIAIeccR+Ca/rDdKubLIU9rcJ3xfy1DC/Wd2Oo=
|
||||||
|
google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo=
|
||||||
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
|
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
|
||||||
|
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|||||||
97
internal/auth/auth.go
Normal file
@ -0,0 +1,97 @@
package auth

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"

	"gopkg.in/yaml.v2"
)

const (
	apiUrlEnvKey = "VET_INSIGHTS_API_URL"
	apiKeyEnvKey = "VET_INSIGHTS_API_KEY"

	defaultApiUrl = "https://api.safedep.io/insights/v1"

	homeRelativeConfigPath = ".safedep/vet-auth.yml"
)

type Config struct {
	ApiUrl string `yaml:"api_url"`
	ApiKey string `yaml:"api_key"`
}

// Global config to be used during runtime
var globalConfig *Config

func init() {
	loadConfiguration()
}

func Configure(m Config) error {
	globalConfig = &m
	return persistConfiguration()
}

func Verify() error {
	// TODO: Verify by actually calling insight API
	return nil
}

func ApiUrl() string {
	if url, ok := os.LookupEnv(apiUrlEnvKey); ok {
		return url
	}

	if globalConfig != nil {
		return globalConfig.ApiUrl
	}

	return defaultApiUrl
}

func ApiKey() string {
	if key, ok := os.LookupEnv(apiKeyEnvKey); ok {
		return key
	}

	if globalConfig != nil {
		return globalConfig.ApiKey
	}

	return ""
}

func loadConfiguration() error {
	path, err := os.UserHomeDir()
	path = filepath.Join(path, homeRelativeConfigPath)

	data, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}

	var config Config
	err = yaml.Unmarshal(data, &config)
	if err != nil {
		return fmt.Errorf("config deserialization failed: %w", err)
	}

	globalConfig = &config
	return nil
}

func persistConfiguration() error {
	data, err := yaml.Marshal(globalConfig)
	if err != nil {
		return fmt.Errorf("config serialization failed: %w", err)
	}

	path, err := os.UserHomeDir()
	path = filepath.Join(path, homeRelativeConfigPath)

	os.MkdirAll(filepath.Dir(path), os.ModePerm)
	return ioutil.WriteFile(path, data, 0600)
}
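The auth package above resolves the Insights API endpoint and key from the environment first and falls back to the YAML config persisted under ~/.safedep/vet-auth.yml. A minimal usage sketch (not part of this commit; the key value is hypothetical and error handling is elided):

package main

import (
	"fmt"

	"github.com/safedep/vet/internal/auth"
)

func main() {
	// Persist an API key and endpoint to ~/.safedep/vet-auth.yml.
	// VET_INSIGHTS_API_URL / VET_INSIGHTS_API_KEY still take precedence at runtime.
	_ = auth.Configure(auth.Config{
		ApiUrl: "https://api.safedep.io/insights/v1",
		ApiKey: "example-api-key", // hypothetical value
	})

	fmt.Println("Insights endpoint:", auth.ApiUrl())
}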
5
main.go
@ -4,6 +4,7 @@ import (
 	"fmt"
 	"os"

+	"github.com/safedep/vet/pkg/common/logger"
 	"github.com/spf13/cobra"
 )
@ -33,6 +34,10 @@ func main() {
 	cmd.AddCommand(newScanCommand())
 	cmd.AddCommand(newVersionCommand())

+	cobra.OnInitialize(func() {
+		logger.SetLogLevel(verbose, debug)
+	})
+
 	if err := cmd.Execute(); err != nil {
 		os.Exit(1)
 	}
32
pkg/analyzer/analyzer.go
Normal file
@ -0,0 +1,32 @@
package analyzer

import "github.com/safedep/vet/pkg/models"

type AnalyzerEventType string

const (
	ET_FilterExpressionMatched = AnalyzerEventType("ev_pkg_filter_match")
)

type AnalyzerEvent struct {
	// Analyzer generating this event
	Source string

	// Type of the event
	Type AnalyzerEventType

	// Entities on which event was generated
	Manifest *models.PackageManifest
	Package  *models.Package
}

// Callback to receive events from analyzer
type AnalyzerEventHandler func(event *AnalyzerEvent) error

// Contract for an analyzer
type Analyzer interface {
	Name() string

	Analyze(manifest *models.PackageManifest,
		handler AnalyzerEventHandler) error
}
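For illustration, a hypothetical analyzer (not part of this commit) that satisfies the Analyzer contract above could emit one event per package; it reuses ET_FilterExpressionMatched only because that is the sole event type defined so far:

package analyzer

import "github.com/safedep/vet/pkg/models"

// countingAnalyzer is an illustrative example only
type countingAnalyzer struct {
	seen int
}

func (c *countingAnalyzer) Name() string {
	return "Example Counting Analyzer"
}

func (c *countingAnalyzer) Analyze(manifest *models.PackageManifest,
	handler AnalyzerEventHandler) error {
	for _, pkg := range manifest.Packages {
		c.seen++

		// Emit an event for every package seen
		err := handler(&AnalyzerEvent{
			Source:   c.Name(),
			Type:     ET_FilterExpressionMatched,
			Manifest: manifest,
			Package:  pkg,
		})
		if err != nil {
			return err
		}
	}

	return nil
}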
95
pkg/analyzer/cel_filter.go
Normal file
@ -0,0 +1,95 @@
package analyzer

import (
	"encoding/json"
	"fmt"
	"reflect"

	"github.com/google/cel-go/cel"
	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/models"
)

type celFilterAnalyzer struct {
	program cel.Program
}

func NewCelFilterAnalyzer(filter string) (Analyzer, error) {
	env, err := cel.NewEnv(
		cel.Variable("pkg", cel.DynType),
		cel.Variable("manifest", cel.DynType),
	)

	if err != nil {
		return nil, err
	}

	ast, issues := env.Compile(filter)
	if issues != nil && issues.Err() != nil {
		return nil, issues.Err()
	}

	prog, err := env.Program(ast)
	if err != nil {
		return nil, err
	}

	return &celFilterAnalyzer{program: prog}, nil
}

func (f *celFilterAnalyzer) Name() string {
	return "CEL Filter Analyzer"
}

func (f *celFilterAnalyzer) Analyze(manifest *models.PackageManifest,
	handler AnalyzerEventHandler) error {

	pkgManifestVal, err := f.valType(manifest)
	if err != nil {
		logger.Errorf("Failed to convert manifest to val: %v", err)
	}

	logger.Infof("CEL filtering manifest: %s", manifest.Path)
	for _, pkg := range manifest.Packages {
		pkgVal, err := f.valType(pkg)
		if err != nil {
			logger.Errorf("Failed to convert package to val: %v", err)
			continue
		}

		out, _, err := f.program.Eval(map[string]interface{}{
			"pkg":      pkgVal,
			"manifest": pkgManifestVal,
		})

		if err != nil {
			logger.Errorf("Failed to evaluate CEL for %s:%v : %v",
				pkg.PackageDetails.Name,
				pkg.PackageDetails.Version, err)
			continue
		}

		if (reflect.TypeOf(out).Kind() == reflect.Bool) &&
			(reflect.ValueOf(out).Bool()) {
			fmt.Printf("[%s] %s %v\n", pkg.PackageDetails.Ecosystem,
				pkg.PackageDetails.Name, pkg.PackageDetails.Version)
		}
	}

	return nil
}

func (f *celFilterAnalyzer) valType(i any) (any, error) {
	data, err := json.Marshal(i)
	if err != nil {
		return nil, err
	}

	ret := make(map[string]interface{})
	err = json.Unmarshal(data, &ret)
	if err != nil {
		return nil, err
	}

	return ret, nil
}
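Since the pkg and manifest variables are bound to the JSON form of the models (see valType above), a CEL expression addresses packages by their JSON keys. A hypothetical wiring sketch (not part of this commit; the field names inside the expression are assumptions that depend on how the models serialize):

package main

import (
	"github.com/safedep/vet/pkg/analyzer"
	"github.com/safedep/vet/pkg/models"
)

// runCelFilter shows how the CEL filter analyzer is driven; matching packages
// are printed by the analyzer itself, so the event handler is a no-op here.
func runCelFilter(manifest *models.PackageManifest, expr string) error {
	task, err := analyzer.NewCelFilterAnalyzer(expr)
	if err != nil {
		return err
	}

	return task.Analyze(manifest, func(event *analyzer.AnalyzerEvent) error {
		return nil
	})
}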
60
pkg/analyzer/json_dump.go
Normal file
@ -0,0 +1,60 @@
package analyzer

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"math/rand"
	"os"
	"path/filepath"
	"time"

	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/models"
)

type jsonDumperAnalyzer struct {
	dir string
}

func NewJsonDumperAnalyzer(dir string) (Analyzer, error) {
	fi, err := os.Stat(dir)
	if err != nil {
		if os.IsNotExist(err) {
			err = os.MkdirAll(dir, os.ModePerm)
			if err != nil {
				return nil, fmt.Errorf("cannot create dir: %w", err)
			}

			return &jsonDumperAnalyzer{dir: dir}, nil
		}

		return nil, fmt.Errorf("cannot stat dir: %w", err)
	}

	if !fi.IsDir() {
		return nil, fmt.Errorf("%s is not a dir", dir)
	}

	return &jsonDumperAnalyzer{dir: dir}, nil
}

func (j *jsonDumperAnalyzer) Name() string {
	return "JSON Dump Generator"
}

func (j *jsonDumperAnalyzer) Analyze(manifest *models.PackageManifest,
	handler AnalyzerEventHandler) error {

	logger.Infof("Running analyzer: %s", j.Name())
	data, err := json.MarshalIndent(manifest, "", " ")
	if err != nil {
		return fmt.Errorf("failed to JSON serialize manifest: %w", err)
	}

	rand.Seed(time.Now().UnixNano())
	path := filepath.Join(j.dir, fmt.Sprintf("%s-%s--%d-dump.json",
		manifest.Ecosystem,
		filepath.Base(manifest.Path),
		rand.Intn(2<<15)))

	return ioutil.WriteFile(path, data, 0600)
}
@ -8,7 +8,7 @@ import (

 func init() {
 	logrus.SetOutput(os.Stdout)
-	logrus.SetLevel(logrus.InfoLevel)
+	logrus.SetLevel(logrus.WarnLevel)
 }

 func SetLogLevel(verbose, debug bool) {
@ -18,7 +18,6 @@ func SetLogLevel(verbose, debug bool) {

 	if debug {
 		logrus.SetLevel(logrus.DebugLevel)
-		logrus.SetReportCaller(true)
 	}
 }
73
pkg/common/utils/workq.go
Normal file
@ -0,0 +1,73 @@
package utils

import (
	"sync"

	"github.com/safedep/vet/pkg/common/logger"
)

type WorkQueueItem interface {
	Id() string
}

type WorkQueueFn[T WorkQueueItem] func(q *WorkQueue[T], item T) error

type WorkQueue[T WorkQueueItem] struct {
	done        chan bool
	m           sync.Mutex
	concurrency int
	wg          sync.WaitGroup
	handler     WorkQueueFn[T]
	status      sync.Map
	items       chan T
}

func NewWorkQueue[T WorkQueueItem](bufferSize int, concurrency int,
	handler WorkQueueFn[T]) *WorkQueue[T] {
	return &WorkQueue[T]{
		handler:     handler,
		concurrency: concurrency,
		items:       make(chan T, bufferSize),
		done:        make(chan bool),
	}
}

func (q *WorkQueue[T]) Start() {
	for i := 0; i < q.concurrency; i++ {
		go func() {
			for {
				select {
				case <-q.done:
					return
				case item := <-q.items:
					err := q.handler(q, item)
					if err != nil {
						logger.Errorf("Handler fn failed with %v", err)
					}

					q.wg.Done()
				}
			}
		}()
	}
}

func (q *WorkQueue[T]) Wait() {
	q.wg.Wait()
}

func (q *WorkQueue[T]) Stop() {
	close(q.done)
}

func (q *WorkQueue[T]) Add(item T) bool {
	if _, ok := q.status.Load(item.Id()); ok {
		return false
	}

	q.status.Store(item.Id(), true)
	q.wg.Add(1)

	q.items <- item
	return true
}
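The generic work queue above deduplicates items by Id() and fans them out to a fixed number of worker goroutines. A minimal standalone sketch of its use (not part of this commit; the job type is hypothetical):

package main

import (
	"fmt"

	"github.com/safedep/vet/pkg/common/utils"
)

// job is a hypothetical WorkQueueItem used only for illustration
type job struct {
	name string
}

func (j job) Id() string { return j.name }

func main() {
	q := utils.NewWorkQueue[job](10, 2, func(q *utils.WorkQueue[job], item job) error {
		fmt.Println("processing", item.name)
		return nil
	})

	q.Start()
	q.Add(job{name: "a"})
	q.Add(job{name: "a"}) // duplicate Id, rejected by Add()
	q.Add(job{name: "b"})

	q.Wait() // blocks until every accepted item has been handled
	q.Stop()
}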
85
pkg/models/models.go
Normal file
@ -0,0 +1,85 @@
package models

import (
	"fmt"
	"hash/fnv"
	"strconv"
	"strings"
	"sync"

	"github.com/google/osv-scanner/pkg/lockfile"
	"github.com/safedep/vet/gen/insightapi"
)

const (
	EcosystemMaven     = "Maven"
	EcosystemRubyGems  = "RubyGems"
	EcosystemGo        = "Go"
	EcosystemNpm       = "npm"
	EcosystemPyPI      = "PyPI"
	EcosystemCargo     = "Cargo"
	EcosystemNuGet     = "NuGet"
	EcosystemPackagist = "Packagist"
	EcosystemHex       = "Hex"
	EcosystemPub       = "Pub"
)

// Represents a package manifest that contains a list
// of packages. Example: pom.xml, requirements.txt
type PackageManifest struct {
	// Filesystem path of this manifest
	Path string `json:"path"`

	// Ecosystem to interpret this manifest
	Ecosystem string `json:"ecosystem"`

	// List of packages obtained by parsing the manifest
	Packages []*Package `json:"packages"`

	// Lock to serialize updating packages
	m sync.Mutex
}

func (pm *PackageManifest) AddPackage(pkg *Package) {
	pm.m.Lock()
	defer pm.m.Unlock()

	pm.Packages = append(pm.Packages, pkg)
}

// Represents a package such as a version of a library defined as a dependency
// in Gemfile.lock, pom.xml etc.
type Package struct {
	lockfile.PackageDetails `json:"package_detail"`

	// Insights obtained for this package
	Insights *insightapi.PackageVersionInsight `json:"insights"`

	// This package is a transitive dependency of parent package
	Parent *Package `json:"-"`

	// Depth of this package in dependency tree
	Depth int `json:"depth"`

	// Manifest from where this package was found directly or indirectly
	Manifest *PackageManifest `json:"-"`
}

func (p *Package) Id() string {
	h := fnv.New64a()
	h.Write([]byte(fmt.Sprintf("%s/%s/%s",
		strings.ToLower(p.Manifest.Ecosystem),
		strings.ToLower(p.PackageDetails.Name),
		strings.ToLower(p.PackageDetails.Version))))

	return strconv.FormatUint(h.Sum64(), 16)
}

func NewPackageDetail(e, n, v string) lockfile.PackageDetails {
	return lockfile.PackageDetails{
		Ecosystem: lockfile.Ecosystem(e),
		CompareAs: lockfile.Ecosystem(e),
		Name:      n,
		Version:   v,
	}
}
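A small sketch (not part of this commit) of how these models fit together by hand: a manifest holding one package, where Id() provides the FNV-1a based identity used by the work queue for de-duplication.

package main

import (
	"fmt"

	"github.com/safedep/vet/pkg/models"
)

func main() {
	manifest := &models.PackageManifest{
		Path:      "requirements.txt",
		Ecosystem: models.EcosystemPyPI,
	}

	pkg := &models.Package{
		PackageDetails: models.NewPackageDetail(models.EcosystemPyPI, "requests", "2.28.1"),
		Manifest:       manifest,
	}

	manifest.AddPackage(pkg)

	// Hex encoded 64-bit FNV-1a hash of "pypi/requests/2.28.1"
	fmt.Println(pkg.Id())
}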
89
pkg/parser/parser.go
Normal file
@ -0,0 +1,89 @@
package parser

import (
	"fmt"

	"github.com/google/osv-scanner/pkg/lockfile"
	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/models"
)

type Parser interface {
	Ecosystem() string
	Parse(lockfilePath string) (models.PackageManifest, error)
}

type parserWrapper struct {
	parser  lockfile.PackageDetailsParser
	parseAs string
}

func List() []string {
	return lockfile.ListParsers()
}

func FindParser(lockfilePath, lockfileAs string) (Parser, error) {
	p, pa := lockfile.FindParser(lockfilePath, lockfileAs)
	if p != nil {
		return &parserWrapper{parser: p, parseAs: pa}, nil
	}

	return nil, fmt.Errorf("no parser found with: %s for: %s", lockfileAs,
		lockfilePath)
}

func (pw *parserWrapper) Ecosystem() string {
	switch pw.parseAs {
	case "Cargo.lock":
		return models.EcosystemCargo
	case "composer.lock":
		return models.EcosystemPackagist
	case "Gemfile.lock":
		return models.EcosystemRubyGems
	case "go.mod":
		return models.EcosystemGo
	case "mix.lock":
		return models.EcosystemHex
	case "package-lock.json":
		return models.EcosystemNpm
	case "pnpm-lock.yaml":
		return models.EcosystemNpm
	case "poetry.lock":
		return models.EcosystemPyPI
	case "pom.xml":
		return models.EcosystemMaven
	case "pubspec.lock":
		return models.EcosystemPub
	case "requirements.txt":
		return models.EcosystemPyPI
	case "yarn.lock":
		return models.EcosystemNpm
	case "gradle.lockfile":
		return models.EcosystemMaven
	case "buildscript-gradle.lockfile":
		return models.EcosystemMaven
	default:
		return ""
	}
}

func (pw *parserWrapper) Parse(lockfilePath string) (models.PackageManifest, error) {
	pm := models.PackageManifest{Path: lockfilePath,
		Ecosystem: pw.Ecosystem()}

	logger.Infof("[%s] Parsing %s", pw.parseAs, lockfilePath)

	packages, err := pw.parser(lockfilePath)
	if err != nil {
		return pm, err
	}

	for _, pkg := range packages {
		pm.AddPackage(&models.Package{
			PackageDetails: pkg,
			Manifest:       &pm,
		})
	}

	return pm, nil
}
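A usage sketch for the parser API above (not part of this commit): a parser is auto-detected from the file name when lockfileAs is empty, and Parse returns a populated PackageManifest.

package main

import (
	"fmt"

	"github.com/safedep/vet/pkg/parser"
)

func main() {
	p, err := parser.FindParser("package-lock.json", "")
	if err != nil {
		panic(err)
	}

	manifest, err := p.Parse("package-lock.json")
	if err != nil {
		panic(err)
	}

	fmt.Printf("%s: %d packages (%s)\n", manifest.Path,
		len(manifest.Packages), manifest.Ecosystem)
}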
26
pkg/parser/parser_test.go
Normal file
@ -0,0 +1,26 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestListParser(t *testing.T) {
	parsers := List()
	assert.Equal(t, 14, len(parsers))
}

func TestInvalidEcosystemMapping(t *testing.T) {
	pw := &parserWrapper{parseAs: "nothing"}
	assert.Empty(t, pw.Ecosystem())
}

func TestEcosystemMapping(t *testing.T) {
	for _, lf := range List() {
		t.Run(lf, func(t *testing.T) {
			pw := &parserWrapper{parseAs: lf}
			assert.NotEmpty(t, pw.Ecosystem())
		})
	}
}
94
pkg/scanner/enrich.go
Normal file
@ -0,0 +1,94 @@
package scanner

import (
	"context"
	"fmt"
	"net/http"
	"strings"

	"github.com/safedep/vet/gen/insightapi"
	"github.com/safedep/vet/internal/auth"
	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/models"
)

// Callback to receive a discovered package dependency
type PackageDependencyCallbackFn func(pkg *models.Package) error

// Enrich meta information associated with
// the package
type PackageMetaEnricher interface {
	Name() string
	Enrich(pkg *models.Package, cb PackageDependencyCallbackFn) error
}

type insightsBasedPackageEnricher struct {
	client *insightapi.ClientWithResponses
}

func NewInsightBasedPackageEnricher() PackageMetaEnricher {
	apiKeyApplier := func(ctx context.Context, req *http.Request) error {
		req.Header.Set("Authorization", auth.ApiKey())
		return nil
	}

	client, err := insightapi.NewClientWithResponses(auth.ApiUrl(),
		insightapi.WithRequestEditorFn(apiKeyApplier))
	if err != nil {
		// TODO: Handle
		panic(err)
	}

	return &insightsBasedPackageEnricher{
		client: client,
	}
}

func (e *insightsBasedPackageEnricher) Name() string {
	return "Insights API"
}

func (e *insightsBasedPackageEnricher) Enrich(pkg *models.Package,
	cb PackageDependencyCallbackFn) error {

	logger.Infof("[%s] Enriching %s/%s", pkg.Manifest.Ecosystem,
		pkg.PackageDetails.Name, pkg.PackageDetails.Version)

	res, err := e.client.GetPackageVersionInsightWithResponse(context.Background(),
		pkg.Manifest.Ecosystem, pkg.Name, pkg.Version)
	if err != nil {
		logger.Errorf("Failed to enrich package: %v", err)
		return err
	}

	if res.HTTPResponse.StatusCode != 200 {
		return fmt.Errorf("bad response: %d: %s", res.HTTPResponse.StatusCode,
			res.HTTPResponse.Status)
	}

	if (res.JSON200 == nil) || (res.JSON200.Dependencies == nil) {
		return fmt.Errorf("unexpected nil response from Insight API")
	}

	for _, dep := range *res.JSON200.Dependencies {
		if strings.EqualFold(dep.PackageVersion.Name, pkg.PackageDetails.Name) {
			// Skip self references in dependency
			continue
		}

		err := cb(&models.Package{
			Manifest: pkg.Manifest,
			Parent:   pkg,
			Depth:    pkg.Depth + 1,
			PackageDetails: models.NewPackageDetail(dep.PackageVersion.Ecosystem,
				dep.PackageVersion.Name, dep.PackageVersion.Version),
		})

		if err != nil {
			logger.Errorf("Failed to invoke package dependency callback: %v", err)
		}
	}

	pkg.Insights = res.JSON200
	return nil
}
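The PackageMetaEnricher contract above is what the scanner fans work out to. A hypothetical implementation (not part of this commit) that enriches nothing, useful for dry runs or tests, looks like this:

package scanner

import "github.com/safedep/vet/pkg/models"

// noopEnricher is an illustrative example of the PackageMetaEnricher contract
type noopEnricher struct{}

func (n *noopEnricher) Name() string {
	return "No-op Enricher"
}

func (n *noopEnricher) Enrich(pkg *models.Package,
	cb PackageDependencyCallbackFn) error {
	// A real enricher would populate pkg.Insights and invoke cb() once per
	// discovered dependency of pkg
	return nil
}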
150
pkg/scanner/scanner.go
Normal file
@ -0,0 +1,150 @@
package scanner

import (
	"github.com/safedep/vet/pkg/analyzer"
	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/common/utils"
	"github.com/safedep/vet/pkg/models"
)

type Config struct {
	ConcurrentAnalyzer int
	TransitiveAnalysis bool
	TransitiveDepth    int
}

type packageManifestScanner struct {
	config    Config
	enrichers []PackageMetaEnricher
	analyzers []analyzer.Analyzer
}

func NewPackageManifestScanner(config Config,
	enrichers []PackageMetaEnricher,
	analyzers []analyzer.Analyzer) *packageManifestScanner {
	return &packageManifestScanner{
		config:    config,
		enrichers: enrichers,
		analyzers: analyzers,
	}
}

// Autodiscover lockfiles
func (s *packageManifestScanner) ScanDirectory(dir string) error {
	logger.Infof("Starting package manifest scanner on dir: %s", dir)

	manifests, err := scanDirectoryForManifests(dir)
	if err != nil {
		logger.Errorf("Failed to scan directory: %v", err)
		return err
	}

	logger.Infof("Discovered %d manifest(s)", len(manifests))
	return s.analyzeManifests(manifests)
}

// Scan specific lockfiles, optionally interpreted as lockfileAs instead of
// relying on automatic parser selection
func (s *packageManifestScanner) ScanLockfiles(lockfiles []string,
	lockfileAs string) error {
	logger.Infof("Scanning %d lockfiles as %s", len(lockfiles), lockfileAs)

	manifests, err := scanLockfilesForManifests(lockfiles, lockfileAs)
	if err != nil {
		logger.Errorf("Failed to scan lockfiles: %v", err)
		return err
	}

	logger.Infof("Discovered %d manifest(s)", len(manifests))
	return s.analyzeManifests(manifests)
}

func (s *packageManifestScanner) analyzeManifests(manifests []*models.PackageManifest) error {
	for _, manifest := range manifests {
		logger.Infof("Analysing %s as %s ecosystem with %d packages", manifest.Path,
			manifest.Ecosystem, len(manifest.Packages))

		err := s.enrichManifest(manifest)
		if err != nil {
			logger.Errorf("Failed to enrich %s manifest %s : %v",
				manifest.Ecosystem, manifest.Path, err)
		}

		err = s.analyzeManifest(manifest)
		if err != nil {
			logger.Errorf("Failed to analyze %s manifest %v : %v",
				manifest.Ecosystem, manifest.Path, err)
		}
	}

	return nil
}

func (s *packageManifestScanner) analyzeManifest(manifest *models.PackageManifest) error {
	for _, task := range s.analyzers {
		err := task.Analyze(manifest, func(event *analyzer.AnalyzerEvent) error {
			// Handle analyzer event
			return nil
		})
		if err != nil {
			logger.Errorf("Analyzer %s failed: %v", task.Name(), err)
		}
	}

	return nil
}

func (s *packageManifestScanner) enrichManifest(manifest *models.PackageManifest) error {
	// FIXME: Potential deadlock situation in case the channel buffer is full
	// because the goroutines perform both read and write to channel. Write occurs
	// when a goroutine invokes the work queue handler and the handler pushes back
	// the dependencies
	q := utils.NewWorkQueue[*models.Package](100000,
		s.config.ConcurrentAnalyzer,
		s.packageEnrichWorkQueueHandler(manifest))
	q.Start()

	for _, pkg := range manifest.Packages {
		q.Add(pkg)
	}

	q.Wait()
	q.Stop()

	return nil
}

func (s *packageManifestScanner) packageEnrichWorkQueueHandler(pm *models.PackageManifest) utils.WorkQueueFn[*models.Package] {
	return func(q *utils.WorkQueue[*models.Package], item *models.Package) error {
		for _, enricher := range s.enrichers {
			err := enricher.Enrich(item, s.packageDependencyHandler(pm, q))
			if err != nil {
				logger.Errorf("Enricher %s failed with %v", enricher.Name(), err)
			}
		}

		return nil
	}
}

func (s *packageManifestScanner) packageDependencyHandler(pm *models.PackageManifest,
	q *utils.WorkQueue[*models.Package]) PackageDependencyCallbackFn {
	return func(pkg *models.Package) error {
		if !s.config.TransitiveAnalysis {
			return nil
		}

		if pkg.Depth >= s.config.TransitiveDepth {
			return nil
		}

		logger.Debugf("Adding transitive dependency %s/%v to work queue",
			pkg.PackageDetails.Name, pkg.PackageDetails.Version)

		if q.Add(pkg) {
			pm.AddPackage(pkg)
		}

		return nil
	}
}
64
pkg/scanner/utils.go
Normal file
@ -0,0 +1,64 @@
package scanner

import (
	"os"
	"path/filepath"

	"github.com/safedep/vet/pkg/common/logger"
	"github.com/safedep/vet/pkg/models"
	"github.com/safedep/vet/pkg/parser"
)

func scanDirectoryForManifests(dir string) ([]*models.PackageManifest, error) {
	var manifests []*models.PackageManifest
	err := filepath.WalkDir(dir, func(path string, info os.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if info.IsDir() && info.Name() == ".git" {
			return filepath.SkipDir
		}

		path, err = filepath.Abs(path)
		if err != nil {
			return err
		}

		p, err := parser.FindParser(path, "")
		if err == nil {
			// We have a parseable file
			manifest, err := p.Parse(path)
			if err != nil {
				logger.Warnf("Failed to parse: %s due to %v", path, err)
			} else {
				manifests = append(manifests, &manifest)
			}
		}

		return nil
	})

	return manifests, err
}

func scanLockfilesForManifests(lockfiles []string, lockfileAs string) ([]*models.PackageManifest, error) {
	var manifests []*models.PackageManifest
	for _, lf := range lockfiles {
		p, err := parser.FindParser(lf, lockfileAs)
		if err != nil {
			logger.Warnf("Failed to parse %s as %s", lf, lockfileAs)
			continue
		}

		manifest, err := p.Parse(lf)
		if err != nil {
			logger.Warnf("Failed to parse: %s due to %v", lf, err)
			continue
		}

		manifests = append(manifests, &manifest)
	}

	return manifests, nil
}
101
scan.go
@ -1,21 +1,32 @@
 package main

 import (
+	"fmt"
 	"os"

+	"github.com/safedep/vet/pkg/analyzer"
 	"github.com/safedep/vet/pkg/common/logger"
+	"github.com/safedep/vet/pkg/parser"
+	"github.com/safedep/vet/pkg/scanner"
 	"github.com/spf13/cobra"
 )

 var (
 	lockfiles     []string
 	lockfileAs    string
 	baseDirectory string
+	transitiveAnalysis  bool
+	transitiveDepth     int
+	concurrency         int
+	dumpJsonManifest    bool
+	dumpJsonManifestDir string
+	celFilterExpression string
 )

 func newScanCommand() *cobra.Command {
 	cmd := &cobra.Command{
 		Use: "scan",
+		Short: "Scan and analyse package manifests",
 		RunE: func(cmd *cobra.Command, args []string) error {
 			startScan()
 			return nil
@ -31,13 +42,85 @@ func newScanCommand() *cobra.Command {
 		"The directory to scan for lockfiles")
 	cmd.Flags().StringArrayVarP(&lockfiles, "lockfiles", "L", []string{},
 		"List of lockfiles to scan")
-	cmd.Flags().StringVarP(&baseDirectory, "lockfile-as", "", "",
-		"Ecosystem to interpret the lockfile as")
+	cmd.Flags().StringVarP(&lockfileAs, "lockfile-as", "", "",
+		"Parser to use for the lockfile (vet scan parsers to list)")
+	cmd.Flags().BoolVarP(&transitiveAnalysis, "transitive", "", true,
+		"Analyze transitive dependencies")
+	cmd.Flags().IntVarP(&transitiveDepth, "transitive-depth", "", 2,
+		"Analyze transitive dependencies till depth")
+	cmd.Flags().IntVarP(&concurrency, "concurrency", "C", 10,
+		"Number of goroutines to use for analysis")
+	cmd.Flags().BoolVarP(&dumpJsonManifest, "json-dump", "", false,
+		"Dump enriched manifests as JSON docs")
+	cmd.Flags().StringVarP(&dumpJsonManifestDir, "json-dump-dir", "", "",
+		"Dump dir for enriched JSON docs")
+	cmd.Flags().StringVarP(&celFilterExpression, "filter-cel", "", "",
+		"Filter and print packages using CEL")
+
+	cmd.AddCommand(listParsersCommand())
 	return cmd
 }

-func startScan() {
-	logger.SetLogLevel(verbose, debug)
-	logger.Infof("Starting vet scanner")
+func listParsersCommand() *cobra.Command {
+	return &cobra.Command{
+		Use: "parsers",
+		Short: "List available lockfile parsers",
+		RunE: func(cmd *cobra.Command, args []string) error {
+			fmt.Printf("Available Lockfile Parsers\n")
+			fmt.Printf("==========================\n\n")
+
+			for idx, p := range parser.List() {
+				fmt.Printf("[%d] %s\n", idx, p)
+			}
+
+			return nil
+		},
+	}
+}
+
+func startScan() {
+	err := internalStartScan()
+	if err != nil {
+		logger.Errorf("Scan completed with error: %v", err)
+	}
+}
+
+func internalStartScan() error {
+	analyzers := []analyzer.Analyzer{}
+	if dumpJsonManifest {
+		task, err := analyzer.NewJsonDumperAnalyzer(dumpJsonManifestDir)
+		if err != nil {
+			return err
+		}
+
+		analyzers = append(analyzers, task)
+	}
+
+	if len(celFilterExpression) > 0 {
+		task, err := analyzer.NewCelFilterAnalyzer(celFilterExpression)
+		if err != nil {
+			return err
+		}
+
+		analyzers = append(analyzers, task)
+	}
+
+	enrichers := []scanner.PackageMetaEnricher{
+		scanner.NewInsightBasedPackageEnricher(),
+	}
+
+	pmScanner := scanner.NewPackageManifestScanner(scanner.Config{
+		TransitiveAnalysis: transitiveAnalysis,
+		TransitiveDepth:    transitiveDepth,
+		ConcurrentAnalyzer: concurrency,
+	}, enrichers, analyzers)
+
+	var err error
+	if len(lockfiles) > 0 {
+		err = pmScanner.ScanLockfiles(lockfiles, lockfileAs)
+	} else {
+		err = pmScanner.ScanDirectory(baseDirectory)
+	}
+
+	return err
 }
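Going by the flags registered above, an enriched-manifest dump can be produced with, for example, vet scan --lockfiles package-lock.json --json-dump --json-dump-dir /tmp/vet-dump, and vet scan parsers lists the supported lockfile parsers. These invocations are inferred from the flag definitions in this diff, not quoted from project documentation.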
11
version.go
@ -7,15 +7,16 @@ import (
 	"github.com/spf13/cobra"
 )

-var GITCOMMIT string
-var VERSION string
+var version string
+var commit string

 func newVersionCommand() *cobra.Command {
 	cmd := &cobra.Command{
 		Use: "version",
+		Short: "Show version and build information",
 		RunE: func(cmd *cobra.Command, args []string) error {
-			fmt.Fprintf(os.Stdout, "Version: %s\n", VERSION)
-			fmt.Fprintf(os.Stdout, "CommitSHA: %s\n", GITCOMMIT)
+			fmt.Fprintf(os.Stdout, "Version: %s\n", version)
+			fmt.Fprintf(os.Stdout, "CommitSHA: %s\n", commit)
+
 			os.Exit(1)
 			return nil