diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ae00a797..08119835 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -14,9 +14,9 @@ jobs: steps: - name: Set up Go 1.x - uses: actions/setup-go@v2 + uses: actions/setup-go@v5 with: - go-version: ^1.18 + go-version: '1.25' - name: Check out code into the Go module directory uses: actions/checkout@v2 @@ -24,16 +24,19 @@ jobs: fetch-depth: 1 ref: ${{ github.event.pull_request.head.sha }} + - name: Install just + uses: extractions/setup-just@v2 + - name: Build the binary - run: make build + run: just build - name: Setup tests - run: make test_setup + run: just test-setup env: CODE_PATH: /home/runner/code - name: Run tests - run: make test + run: just test env: CODE_PATH: /home/runner/code diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml new file mode 100644 index 00000000..86551111 --- /dev/null +++ b/.github/workflows/build-and-deploy.yml @@ -0,0 +1,373 @@ +name: Build and Deploy + +on: + push: + tags: + - 'v*.*.*' + - 'v*.*.*-*' + +permissions: + contents: read + +env: + GO_VERSION: '1.25' + +jobs: + resolve-env: + runs-on: ubuntu-latest + outputs: + environment: ${{ steps.resolve.outputs.environment }} + base_url: ${{ steps.resolve.outputs.base_url }} + version: ${{ steps.resolve.outputs.version }} + binary_name: ${{ steps.resolve.outputs.binary_name }} + steps: + - name: Resolve environment from tag + id: resolve + run: | + TAG="${GITHUB_REF#refs/tags/v}" + if [[ "$TAG" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "environment=prod" >> "$GITHUB_OUTPUT" + echo "base_url=https://deepsource.com/cli" >> "$GITHUB_OUTPUT" + echo "binary_name=deepsource" >> "$GITHUB_OUTPUT" + else + echo "environment=dev" >> "$GITHUB_OUTPUT" + echo "base_url=https://cli.deepsource.one" >> "$GITHUB_OUTPUT" + echo "binary_name=deepsource-dev" >> "$GITHUB_OUTPUT" + fi + echo "version=${TAG}" >> "$GITHUB_OUTPUT" + + build-linux: + needs: resolve-env + runs-on: ubuntu-latest + strategy: + matrix: + include: + - goarch: amd64 + cc: x86_64-linux-gnu-gcc + cxx: x86_64-linux-gnu-g++ + - goarch: arm64 + cc: aarch64-linux-gnu-gcc + cxx: aarch64-linux-gnu-g++ + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Install cross-compiler + if: matrix.goarch == 'arm64' + run: sudo apt-get update && sudo apt-get install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu + + - name: Build + env: + CGO_ENABLED: '1' + GOOS: linux + GOARCH: ${{ matrix.goarch }} + CC: ${{ matrix.cc }} + CXX: ${{ matrix.cxx }} + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + cd cmd/deepsource && go build -tags static_all \ + -ldflags "-X 'main.version=${{ needs.resolve-env.outputs.version }}' -X 'main.Date=$(date -u +%Y-%m-%d)' -X 'main.SentryDSN=${{ secrets.SENTRY_DSN }}' -X 'main.buildMode=${{ needs.resolve-env.outputs.environment }}'" \ + -o "$BINARY_NAME" . 
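(Aside, not part of the change: the `-X` linker flags above inject build metadata into package-level variables of `cmd/deepsource`. A minimal local sketch of the same build follows — the version and build-mode values are placeholders, and the Sentry DSN is omitted because it comes from a repository secret.)

```sh
# Rough local equivalent of the workflow's Build step; values are illustrative only.
VERSION="1.2.3"        # derived from the pushed tag in CI
BUILD_MODE="prod"      # "dev" for pre-release tags such as v1.2.3-rc.1
cd cmd/deepsource && go build -tags static_all \
  -ldflags "-X 'main.version=${VERSION}' -X 'main.Date=$(date -u +%Y-%m-%d)' -X 'main.buildMode=${BUILD_MODE}'" \
  -o deepsource .
```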
+ + - name: Package + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + ARCHIVE="${BINARY_NAME}_${{ needs.resolve-env.outputs.version }}_linux_${{ matrix.goarch }}.tar.gz" + tar -czf "$ARCHIVE" -C cmd/deepsource "$BINARY_NAME" + sha256sum "$ARCHIVE" > "${ARCHIVE}.sha256" + echo "ARCHIVE=${ARCHIVE}" >> "$GITHUB_ENV" + + - uses: actions/upload-artifact@v4 + with: + name: build-linux-${{ matrix.goarch }} + path: | + ${{ needs.resolve-env.outputs.binary_name }}_*.tar.gz + ${{ needs.resolve-env.outputs.binary_name }}_*.tar.gz.sha256 + + build-darwin: + needs: resolve-env + runs-on: macos-latest + strategy: + matrix: + goarch: [amd64, arm64] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Build + env: + CGO_ENABLED: '1' + GOOS: darwin + GOARCH: ${{ matrix.goarch }} + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + cd cmd/deepsource && go build -tags static_all \ + -ldflags "-X 'main.version=${{ needs.resolve-env.outputs.version }}' -X 'main.Date=$(date -u +%Y-%m-%d)' -X 'main.SentryDSN=${{ secrets.SENTRY_DSN }}' -X 'main.buildMode=${{ needs.resolve-env.outputs.environment }}'" \ + -o "$BINARY_NAME" . + + - name: Codesign + if: needs.resolve-env.outputs.environment == 'prod' + env: + APPLE_CERTIFICATE_BASE64: ${{ secrets.APPLE_CERTIFICATE_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} + run: | + # Import certificate + CERT_PATH="$RUNNER_TEMP/certificate.p12" + KEYCHAIN_PATH="$RUNNER_TEMP/signing.keychain-db" + KEYCHAIN_PASSWORD="$(openssl rand -hex 16)" + + echo "$APPLE_CERTIFICATE_BASE64" | base64 --decode > "$CERT_PATH" + + security create-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + security set-keychain-settings -lut 21600 "$KEYCHAIN_PATH" + security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + security import "$CERT_PATH" -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k "$KEYCHAIN_PATH" + security set-key-partition-list -S apple-tool:,apple: -k "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + security list-keychains -d user -s "$KEYCHAIN_PATH" login.keychain + + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + + # Sign the binary + codesign --force --options runtime \ + --sign "Developer ID Application: DeepSource Corp" \ + "cmd/deepsource/${BINARY_NAME}" + + - name: Notarize + if: needs.resolve-env.outputs.environment == 'prod' + env: + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + APPLE_APP_PASSWORD: ${{ secrets.APPLE_APP_PASSWORD }} + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + + # Create zip for notarization + ditto -c -k "cmd/deepsource/${BINARY_NAME}" notarize.zip + xcrun notarytool submit notarize.zip \ + --apple-id "$APPLE_ID" \ + --team-id "$APPLE_TEAM_ID" \ + --password "$APPLE_APP_PASSWORD" \ + --wait + + - name: Verify signing + if: needs.resolve-env.outputs.environment == 'prod' + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + codesign --verify --verbose "cmd/deepsource/${BINARY_NAME}" + spctl --assess --type execute "cmd/deepsource/${BINARY_NAME}" || true + + - name: Package + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + ARCHIVE="${BINARY_NAME}_${{ needs.resolve-env.outputs.version }}_darwin_${{ matrix.goarch }}.tar.gz" + tar -czf "$ARCHIVE" -C cmd/deepsource "$BINARY_NAME" + shasum -a 256 "$ARCHIVE" > "${ARCHIVE}.sha256" + + - uses: actions/upload-artifact@v4 + with: + name: 
build-darwin-${{ matrix.goarch }} + path: | + ${{ needs.resolve-env.outputs.binary_name }}_*.tar.gz + ${{ needs.resolve-env.outputs.binary_name }}_*.tar.gz.sha256 + + build-windows: + needs: resolve-env + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Install cross-compiler + run: sudo apt-get update && sudo apt-get install -y gcc-mingw-w64-x86-64 + + - name: Build + env: + CGO_ENABLED: '1' + GOOS: windows + GOARCH: amd64 + CC: x86_64-w64-mingw32-gcc + CXX: x86_64-w64-mingw32-g++ + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + cd cmd/deepsource && go build -tags static_all \ + -ldflags "-X 'main.version=${{ needs.resolve-env.outputs.version }}' -X 'main.Date=$(date -u +%Y-%m-%d)' -X 'main.SentryDSN=${{ secrets.SENTRY_DSN }}' -X 'main.buildMode=${{ needs.resolve-env.outputs.environment }}'" \ + -o "${BINARY_NAME}.exe" . + + - name: Package + run: | + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + ARCHIVE="${BINARY_NAME}_${{ needs.resolve-env.outputs.version }}_windows_amd64.zip" + zip "$ARCHIVE" -j "cmd/deepsource/${BINARY_NAME}.exe" + sha256sum "$ARCHIVE" > "${ARCHIVE}.sha256" + + - uses: actions/upload-artifact@v4 + with: + name: build-windows-amd64 + path: | + ${{ needs.resolve-env.outputs.binary_name }}_*.zip + ${{ needs.resolve-env.outputs.binary_name }}_*.zip.sha256 + + deploy: + needs: [resolve-env, build-linux, build-darwin, build-windows] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + with: + path: artifacts + merge-multiple: true + + - name: Generate manifest + run: | + VERSION="${{ needs.resolve-env.outputs.version }}" + BINARY_NAME="${{ needs.resolve-env.outputs.binary_name }}" + BUILD_TIME="$(date -u +%Y-%m-%dT%H:%M:%SZ)" + + get_sha() { + cat "artifacts/${1}.sha256" | awk '{print $1}' + } + + cat > artifacts/manifest.json < artifacts/install + + - name: Upload to R2 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + run: | + ENDPOINT="https://${{ secrets.CLOUDFLARE_ACCOUNT_ID }}.r2.cloudflarestorage.com" + if [ "${{ needs.resolve-env.outputs.environment }}" = "prod" ]; then + BUCKET="${{ secrets.R2_PROD_BUCKET_NAME }}" + else + BUCKET="${{ secrets.R2_DEV_BUCKET_NAME }}" + fi + + # Prod stores under cli/ prefix; dev serves bucket root via subdomain + if [ "${{ needs.resolve-env.outputs.environment }}" = "prod" ]; then + PREFIX="cli/" + else + PREFIX="" + fi + + # Upload archives and checksums (immutable) + for f in artifacts/${{ needs.resolve-env.outputs.binary_name }}_*; do + aws s3 cp "$f" "s3://${BUCKET}/${PREFIX}build/$(basename "$f")" \ + --endpoint-url "$ENDPOINT" \ + --cache-control "public, max-age=31536000, immutable" + done + + # Upload manifest and install script (short cache) + for f in artifacts/manifest.json artifacts/install; do + aws s3 cp "$f" "s3://${BUCKET}/${PREFIX}$(basename "$f")" \ + --endpoint-url "$ENDPOINT" \ + --cache-control "public, max-age=60" + done + + publish-homebrew: + needs: [resolve-env, deploy] + if: needs.resolve-env.outputs.environment == 'prod' + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v4 + with: + path: artifacts + merge-multiple: true + + - name: Generate formula + run: | + VERSION="${{ needs.resolve-env.outputs.version }}" + BASE="${{ needs.resolve-env.outputs.base_url }}/build" + + DARWIN_AMD64_SHA="$(cat 
artifacts/deepsource_${VERSION}_darwin_amd64.tar.gz.sha256 | awk '{print $1}')" + DARWIN_ARM64_SHA="$(cat artifacts/deepsource_${VERSION}_darwin_arm64.tar.gz.sha256 | awk '{print $1}')" + LINUX_AMD64_SHA="$(cat artifacts/deepsource_${VERSION}_linux_amd64.tar.gz.sha256 | awk '{print $1}')" + LINUX_ARM64_SHA="$(cat artifacts/deepsource_${VERSION}_linux_arm64.tar.gz.sha256 | awk '{print $1}')" + + cat > deepsource.rb <> .release-env - echo 'HOMEBREW_TOKEN=${{secrets.DS_BOT_PAT}}' >> .release-env - echo 'DEEPSOURCE_CLI_SENTRY_DSN=${{secrets.SENTRY_DSN}}' >> .release-env - - - name: Publish Release - run: make release diff --git a/Makefile b/Makefile deleted file mode 100644 index aa7a28f1..00000000 --- a/Makefile +++ /dev/null @@ -1,69 +0,0 @@ -PACKAGE_NAME := github.com/deepsourcelabs/cli -GOLANG_CROSS_VERSION ?= v1.21.6 - -SYSROOT_DIR ?= sysroots -SYSROOT_ARCHIVE ?= sysroots.tar.bz2 - -build: - cd cmd/deepsource && GOOS=linux GOARCH=amd64 go build -tags static_all -o /tmp/deepsource . - -build_local: - cd cmd/deepsource && go build -tags static_all -o /tmp/deepsource . - -test: - CGO_ENABLED=1 go test -v ./command/report/tests/... -count=1 - echo "\n====TESTING DEEPSOURCE PACKAGE====\n" - CGO_ENABLED=1 go test -v ./deepsource/tests/... - echo "\n====TESTING CONFIG VALIDATOR PACKAGE====\n" - go test -v ./configvalidator/... -count=1 - echo "\n====CALCULATING TEST COVERAGE FOR ENTIRE PACKAGE====\n" - go test -v -coverprofile=coverage.out -count=1 ./... - -test_setup: - mkdir -p ${CODE_PATH} - cd ${CODE_PATH} && ls -A1 | xargs rm -rf - git clone https://github.com/DeepSourceCorp/july ${CODE_PATH} - chmod +x /tmp/deepsource - cp ./command/report/tests/golden_files/python_coverage.xml /tmp - -.PHONY: sysroot-pack -sysroot-pack: - @tar cf - $(SYSROOT_DIR) -P | pv -s $[$(du -sk $(SYSROOT_DIR) | awk '{print $1}') * 1024] | pbzip2 > $(SYSROOT_ARCHIVE) - -.PHONY: sysroot-unpack -sysroot-unpack: - @pv $(SYSROOT_ARCHIVE) | pbzip2 -cd | tar -xf - - -.PHONY: release-dry-run -release-dry-run: - @if [ ! -f ".release-env" ]; then \ - echo "\033[91m.release-env is required for release\033[0m";\ - exit 1;\ - fi - @docker run \ - --rm \ - -e CGO_ENABLED=1 \ - --env-file .release-env \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v `pwd`:/go/src/$(PACKAGE_NAME) \ - -v `pwd`/sysroot:/sysroot \ - -w /go/src/$(PACKAGE_NAME) \ - ghcr.io/goreleaser/goreleaser-cross:${GOLANG_CROSS_VERSION} \ - release --clean --skip-publish --skip-validate - -.PHONY: release -release: - @if [ ! -f ".release-env" ]; then \ - echo "\033[91m.release-env is required for release\033[0m";\ - exit 1;\ - fi - docker run \ - --rm \ - -e CGO_ENABLED=1 \ - --env-file .release-env \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v `pwd`:/go/src/$(PACKAGE_NAME) \ - -v `pwd`/sysroot:/sysroot \ - -w /go/src/$(PACKAGE_NAME) \ - ghcr.io/goreleaser/goreleaser-cross:${GOLANG_CROSS_VERSION} \ - release --clean diff --git a/README.md b/README.md index 73737f01..f1871cf1 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,28 @@ This script will detect the operating system and architecture and puts deepsourc In order to report test-coverage to DeepSource using the `report` command, an environment variable named as `DEEPSOURCE_DSN` has to be set. It's value will be available under 'Settings' tab of the repository page. 
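For illustration only (the DSN value below is a placeholder; the real value is listed under the repository's Settings tab), reporting coverage with the DSN exported looks roughly like:

```sh
# Placeholder DSN; substitute the value shown in your repository settings.
export DEEPSOURCE_DSN="<your-repository-dsn>"
deepsource report --analyzer test-coverage --key go --value-file ./coverage.out
```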
+## Authentication + +### Browser-based login (default) +```sh +deepsource auth login +``` + +### Token-based login +```sh +deepsource auth login --token +``` + +### Enterprise (custom hostname) +```sh +deepsource auth login --host your-enterprise.deepsource.com +``` + +### OIDC-based reporting (CI environments) +```sh +deepsource report --analyzer test-coverage --key go --value-file ./coverage.out --use-oidc +``` + ## Usage The CLI provides access to a wide range of commands. Here is a list of the @@ -48,16 +70,16 @@ commands along with their brief description. ``` Usage: - deepsource [] + deepsource [flags] Available commands are: - report Report an artifact to an analyzer - config Generate and Validate DeepSource config - help Help about any command - issues Show the list of issues in a file in a repository - repo Operations related to the project repository - report Report artifacts to DeepSource - version Get the version of the DeepSource CLI + auth Authentication commands (login, logout, refresh, status) + analysis View analysis runs + issues View issues in a repository + repo Operations related to the project repository (status, view) + report Report artifacts to DeepSource + metrics View repository metrics + vulnerabilities View vulnerabilities in a repository Help: Use 'deepsource --help/-h' for more information about the command. diff --git a/VERSION b/VERSION new file mode 100644 index 00000000..815e68dd --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +2.0.8 diff --git a/version/version.go b/buildinfo/version.go similarity index 74% rename from version/version.go rename to buildinfo/version.go index 992fbbc2..41b7563d 100644 --- a/version/version.go +++ b/buildinfo/version.go @@ -1,4 +1,4 @@ -package version +package buildinfo import ( "fmt" @@ -7,6 +7,14 @@ import ( var buildInfo *BuildInfo +// App identity variables. Defaults are prod values; overridden in main.go for dev builds. +var ( + AppName = "deepsource" // binary name / display name + ConfigDirName = ".deepsource" // ~// + KeychainSvc = "deepsource-cli" // macOS keychain service + KeychainKey = "deepsource-cli-token" // macOS keychain account +) + // BuildInfo describes the compile time information. type BuildInfo struct { // Version is the current semver. diff --git a/version/version_test.go b/buildinfo/version_test.go similarity index 98% rename from version/version_test.go rename to buildinfo/version_test.go index 067f27ea..df9029d7 100644 --- a/version/version_test.go +++ b/buildinfo/version_test.go @@ -1,4 +1,4 @@ -package version +package buildinfo import ( "reflect" diff --git a/cmd/deepsource/main.go b/cmd/deepsource/main.go index d25f8ed1..8a34548f 100644 --- a/cmd/deepsource/main.go +++ b/cmd/deepsource/main.go @@ -1,12 +1,16 @@ package main import ( + "errors" + "fmt" "log" "os" + "strings" "time" + v "github.com/deepsourcelabs/cli/buildinfo" "github.com/deepsourcelabs/cli/command" - v "github.com/deepsourcelabs/cli/version" + clierrors "github.com/deepsourcelabs/cli/internal/errors" "github.com/getsentry/sentry-go" "github.com/pterm/pterm" ) @@ -20,24 +24,60 @@ var ( // DSN used for sentry SentryDSN string + + // buildMode is "dev" or "prod" (default). Set via ldflags -X. 
+ buildMode string ) +func sentryEnvironment(ver string) string { + if strings.HasPrefix(ver, "v") || strings.Contains(ver, ".") { + return "production" + } + return "development" +} + func main() { log.SetFlags(log.LstdFlags | log.Lshortfile) + // Override app identity for dev builds + if buildMode == "dev" { + v.AppName = "deepsource-dev" + v.ConfigDirName = ".deepsource-dev" + v.KeychainSvc = "deepsource-dev-cli" + v.KeychainKey = "deepsource-dev-cli-token" + } + // Init sentry err := sentry.Init(sentry.ClientOptions{ - Dsn: SentryDSN, + Dsn: SentryDSN, + Release: v.AppName + "-cli@" + version, + Environment: sentryEnvironment(version), }) if err != nil { log.Println("Could not load sentry.") } + + defer func() { + if r := recover(); r != nil { + sentry.CurrentHub().Recover(r) + sentry.Flush(2 * time.Second) + fmt.Fprintf(os.Stderr, "fatal: unexpected panic: %v\n", r) + os.Exit(2) + } + }() + v.SetBuildInfo(version, Date, "", "") if err := command.Execute(); err != nil { - // TODO: Handle exit codes here - pterm.Error.Println(err) - sentry.CaptureException(err) + var cliErr *clierrors.CLIError + if errors.As(err, &cliErr) { + pterm.Error.Println(cliErr.Message) + } else { + pterm.Error.Println(err) + } + if !clierrors.IsUserError(err) { + sentry.CaptureException(err) + } sentry.Flush(2 * time.Second) os.Exit(1) } diff --git a/command/analysis/analysis.go b/command/analysis/analysis.go new file mode 100644 index 00000000..39558d5f --- /dev/null +++ b/command/analysis/analysis.go @@ -0,0 +1,412 @@ +package analysis + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "strings" + "time" + + "github.com/MakeNowJust/heredoc" + "github.com/deepsourcelabs/cli/command/cmddeps" + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/deepsource/runs" + "github.com/deepsourcelabs/cli/internal/cli/completion" + "github.com/deepsourcelabs/cli/internal/cli/style" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + "github.com/deepsourcelabs/cli/internal/vcs" + "github.com/pterm/pterm" + "github.com/spf13/cobra" +) + +type AnalysisOptions struct { + RepoArg string + LimitArg int + OutputFormat string + commitOid string + deps *cmddeps.Deps +} + +func (opts *AnalysisOptions) stdout() io.Writer { + if opts.deps != nil && opts.deps.Stdout != nil { + return opts.deps.Stdout + } + return os.Stdout +} + +func NewCmdAnalysis() *cobra.Command { + return NewCmdAnalysisWithDeps(nil) +} + +func NewCmdAnalysisWithDeps(deps *cmddeps.Deps) *cobra.Command { + opts := AnalysisOptions{ + LimitArg: 20, + deps: deps, + } + + doc := heredoc.Docf(` + View analysis runs for a repository. 
+ + Lists recent analysis runs by default: + %[1]s + + Use %[2]s to scope to a specific repository: + %[3]s + + Use %[4]s to show run metadata and issues summary: + %[5]s + `, + style.Cyan("deepsource analysis"), + style.Yellow("--repo"), + style.Cyan("deepsource analysis --repo repo_name"), + style.Yellow("--commit"), + style.Cyan("deepsource analysis --commit abc123f"), + ) + + cmd := &cobra.Command{ + Use: "analysis [flags]", + Short: "View analysis runs", + Long: doc, + RunE: func(cmd *cobra.Command, args []string) error { + if opts.commitOid != "" { + return opts.runDetail(cmd.Context()) + } + return opts.runList() + }, + } + + cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "List history for the specified repository") + cmd.Flags().IntVarP(&opts.LimitArg, "limit", "l", 20, "Number of analysis runs to fetch") + cmd.Flags().StringVar(&opts.commitOid, "commit", "", "Show metadata and issues summary for a specific commit") + cmd.Flags().StringVarP(&opts.OutputFormat, "output", "o", "pretty", "Output format: pretty, json") + + _ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "pretty\tPretty-printed output", + "json\tJSON output", + }, cobra.ShellCompDirectiveNoFileComp + }) + + return cmd +} + +// runList fetches and displays a table of recent analysis runs. +func (opts *AnalysisOptions) runList() error { + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + cfg, err := cfgMgr.Load() + if err != nil { + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) + } + if err := cfg.VerifyAuthentication(); err != nil { + return err + } + + remote, err := vcs.ResolveRemote(opts.RepoArg) + if err != nil { + return err + } + + var client *deepsource.Client + if opts.deps != nil && opts.deps.Client != nil { + client = opts.deps.Client + } else { + client, err = deepsource.New(deepsource.ClientOpts{ + Token: cfg.Token, + HostName: cfg.Host, + OnTokenRefreshed: cfgMgr.TokenRefreshCallback(), + }) + if err != nil { + return err + } + } + + ctx := context.Background() + analysisRuns, err := client.GetAnalysisRuns(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.LimitArg) + if err != nil { + return err + } + + if len(analysisRuns) == 0 { + pterm.Info.Println("No analysis runs found for this repository.") + return nil + } + + if opts.OutputFormat == "json" { + return opts.outputRunsJSON(analysisRuns) + } + + showRunsTable(analysisRuns) + return nil +} + +// runDetail fetches and displays metadata + issues summary for a single commit. 
+func (opts *AnalysisOptions) runDetail(ctx context.Context) error { + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + cfg, err := cfgMgr.Load() + if err != nil { + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) + } + if err := cfg.VerifyAuthentication(); err != nil { + return err + } + + var client *deepsource.Client + if opts.deps != nil && opts.deps.Client != nil { + client = opts.deps.Client + } else { + client, err = deepsource.New(deepsource.ClientOpts{ + Token: cfg.Token, + HostName: cfg.Host, + OnTokenRefreshed: cfgMgr.TokenRefreshCallback(), + }) + if err != nil { + return err + } + } + + commitOid := opts.commitOid + runWithIssues, err := client.GetRunIssues(ctx, commitOid) + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to fetch run details", err) + } + + if opts.OutputFormat == "json" { + return opts.outputRunDetailJSON(runWithIssues) + } + + commitShort := commitOid + if len(commitShort) > 8 { + commitShort = commitShort[:8] + } + + pterm.DefaultBox.WithTitle("Analysis Run").WithTitleTopCenter().Println( + fmt.Sprintf("%s %s\n%s %s\n%s %s", + pterm.Bold.Sprint("Commit:"), + commitShort, + pterm.Bold.Sprint("Branch:"), + runWithIssues.BranchName, + pterm.Bold.Sprint("Status:"), + formatStatus(runWithIssues.Status), + ), + ) + + showIssuesSummary(runWithIssues.Issues) + + pterm.Println() + pterm.Info.Printfln("Run %s to view full issue details", + style.Cyan("deepsource issues --commit %s", commitShort)) + + return nil +} + +// --- JSON output --- + +type AnalysisRunJSON struct { + CommitOid string `json:"commit_oid"` + BranchName string `json:"branch_name"` + Status string `json:"status"` + OccurrencesIntroduced int `json:"occurrences_introduced"` + OccurrencesResolved int `json:"occurrences_resolved"` + OccurrencesSuppressed int `json:"occurrences_suppressed"` + FinishedAt *time.Time `json:"finished_at"` +} + +type RunDetailJSON struct { + CommitOid string `json:"commit_oid"` + BranchName string `json:"branch_name"` + Status string `json:"status"` + Issues []RunIssueJSON `json:"issues"` +} + +type RunIssueJSON struct { + Path string `json:"path"` + Title string `json:"title"` + Code string `json:"code"` + Category string `json:"category"` + Severity string `json:"severity"` +} + +func (opts *AnalysisOptions) outputRunsJSON(analysisRuns []runs.AnalysisRun) error { + result := make([]AnalysisRunJSON, 0, len(analysisRuns)) + for _, run := range analysisRuns { + result = append(result, AnalysisRunJSON{ + CommitOid: run.CommitOid, + BranchName: run.BranchName, + Status: run.Status, + OccurrencesIntroduced: run.OccurrencesIntroduced, + OccurrencesResolved: run.OccurrencesResolved, + OccurrencesSuppressed: run.OccurrencesSuppressed, + FinishedAt: run.FinishedAt, + }) + } + data, err := json.MarshalIndent(result, "", " ") + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format JSON output", err) + } + fmt.Fprintln(opts.stdout(), string(data)) + return nil +} + +func (opts *AnalysisOptions) outputRunDetailJSON(runWithIssues *runs.RunWithIssues) error { + issuesJSON := make([]RunIssueJSON, 0, len(runWithIssues.Issues)) + for _, issue := range runWithIssues.Issues { + issuesJSON = append(issuesJSON, RunIssueJSON{ + Path: issue.Path, + Title: issue.Title, + Code: issue.IssueCode, + Category: issue.Category, + Severity: issue.Severity, + }) + } + result := 
RunDetailJSON{ + CommitOid: runWithIssues.CommitOid, + BranchName: runWithIssues.BranchName, + Status: runWithIssues.Status, + Issues: issuesJSON, + } + data, err := json.MarshalIndent(result, "", " ") + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format JSON output", err) + } + fmt.Fprintln(opts.stdout(), string(data)) + return nil +} + +// --- Display helpers --- + +func showRunsTable(analysisRuns []runs.AnalysisRun) { + header := []string{"Commit", "Branch", "Status", "Introduced", "Resolved", "Suppressed", "Finished"} + data := [][]string{header} + + for _, run := range analysisRuns { + commitShort := run.CommitOid + if len(commitShort) > 8 { + commitShort = commitShort[:8] + } + + branch := run.BranchName + if branch == "" { + branch = "-" + } + + status := formatStatus(run.Status) + introduced := fmt.Sprintf("%d", run.OccurrencesIntroduced) + resolved := fmt.Sprintf("%d", run.OccurrencesResolved) + suppressed := fmt.Sprintf("%d", run.OccurrencesSuppressed) + + finished := "-" + if run.FinishedAt != nil { + finished = formatTime(*run.FinishedAt) + } + + data = append(data, []string{ + commitShort, + branch, + status, + introduced, + resolved, + suppressed, + finished, + }) + } + + pterm.DefaultTable.WithHasHeader().WithData(data).Render() +} + +func showIssuesSummary(issues []runs.RunIssue) { + if len(issues) == 0 { + pterm.Println() + pterm.Success.Println("No issues found in this run") + return + } + + var critical, major, minor int + for _, issue := range issues { + switch strings.ToUpper(issue.Severity) { + case "CRITICAL": + critical++ + case "MAJOR": + major++ + case "MINOR": + minor++ + } + } + + pterm.Println() + pterm.Println(pterm.Bold.Sprintf("Issues: %d total", len(issues))) + + parts := []string{} + if critical > 0 { + parts = append(parts, pterm.Red(fmt.Sprintf("%d critical", critical))) + } + if major > 0 { + parts = append(parts, pterm.LightRed(fmt.Sprintf("%d major", major))) + } + if minor > 0 { + parts = append(parts, pterm.Yellow(fmt.Sprintf("%d minor", minor))) + } + if len(parts) > 0 { + pterm.Println(" " + strings.Join(parts, ", ")) + } +} + +func formatStatus(status string) string { + switch strings.ToUpper(status) { + case "SUCCESS": + return pterm.Green("Success") + case "FAILURE": + return pterm.Red("Failure") + case "PENDING": + return pterm.Yellow("Pending") + case "RUNNING": + return pterm.Cyan("Running") + default: + return status + } +} + +func formatTime(t time.Time) string { + now := time.Now() + diff := now.Sub(t) + + switch { + case diff < time.Minute: + return "just now" + case diff < time.Hour: + mins := int(diff.Minutes()) + if mins == 1 { + return "1 min ago" + } + return fmt.Sprintf("%d mins ago", mins) + case diff < 24*time.Hour: + hours := int(diff.Hours()) + if hours == 1 { + return "1 hour ago" + } + return fmt.Sprintf("%d hours ago", hours) + case diff < 7*24*time.Hour: + days := int(diff.Hours() / 24) + if days == 1 { + return "1 day ago" + } + return fmt.Sprintf("%d days ago", days) + default: + return t.Format("Jan 2, 2006") + } +} diff --git a/command/analysis/tests/analysis_test.go b/command/analysis/tests/analysis_test.go new file mode 100644 index 00000000..347e83ec --- /dev/null +++ b/command/analysis/tests/analysis_test.go @@ -0,0 +1,77 @@ +package tests + +import ( + "bytes" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/command/cmddeps" + analysisCmd "github.com/deepsourcelabs/cli/command/analysis" + "github.com/deepsourcelabs/cli/deepsource" + 
"github.com/deepsourcelabs/cli/internal/testutil" +) + +func goldenPath(name string) string { + _, callerFile, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(callerFile), "golden_files", name) +} + +func TestAnalysisListRuns(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "GetAnalysisRuns": goldenPath("runs_list_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := analysisCmd.NewCmdAnalysisWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("runs_list_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestAnalysisRunDetail(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "GetRunIssues": goldenPath("run_detail_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := analysisCmd.NewCmdAnalysisWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--commit", "abc123f", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("run_detail_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} diff --git a/command/analysis/tests/golden_files/run_detail_output.json b/command/analysis/tests/golden_files/run_detail_output.json new file mode 100644 index 00000000..6ad155d7 --- /dev/null +++ b/command/analysis/tests/golden_files/run_detail_output.json @@ -0,0 +1,21 @@ +{ + "commit_oid": "abc123f", + "branch_name": "main", + "status": "SUCCESS", + "issues": [ + { + "path": "cmd/server/main.go", + "title": "Error return value not checked", + "code": "GO-R1005", + "category": "BUG_RISK", + "severity": "MAJOR" + }, + { + "path": "internal/handler/auth.go", + "title": "Potential SQL injection", + "code": "GO-S1001", + "category": "SECURITY", + "severity": "CRITICAL" + } + ] +} diff --git a/command/analysis/tests/golden_files/run_detail_response.json b/command/analysis/tests/golden_files/run_detail_response.json new file mode 100644 index 00000000..e27a2852 --- /dev/null +++ b/command/analysis/tests/golden_files/run_detail_response.json @@ -0,0 +1,52 @@ +{ + "run": { + "runUid": "run-uid-001", + "commitOid": "abc123f", + "branchName": "main", + "status": "SUCCESS", + "checks": { + "edges": [ + { + "node": { + "analyzer": { + "name": "Go", + "shortcode": "go" + }, + "issues": { + "edges": [ + { + "node": { + "source": "static", + "path": "cmd/server/main.go", + "beginLine": 42, + "beginColumn": 5, + "endLine": 42, + "endColumn": 30, + "title": "Error return value not checked", + "shortcode": "GO-R1005", + "category": "BUG_RISK", + "severity": "MAJOR" + } + }, + { + "node": { + "source": "ai", + "path": 
"internal/handler/auth.go", + "beginLine": 15, + "beginColumn": 1, + "endLine": 20, + "endColumn": 2, + "title": "Potential SQL injection", + "shortcode": "GO-S1001", + "category": "SECURITY", + "severity": "CRITICAL" + } + } + ] + } + } + } + ] + } + } +} diff --git a/command/analysis/tests/golden_files/runs_list_output.json b/command/analysis/tests/golden_files/runs_list_output.json new file mode 100644 index 00000000..4986a5e6 --- /dev/null +++ b/command/analysis/tests/golden_files/runs_list_output.json @@ -0,0 +1,20 @@ +[ + { + "commit_oid": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2", + "branch_name": "main", + "status": "SUCCESS", + "occurrences_introduced": 3, + "occurrences_resolved": 1, + "occurrences_suppressed": 0, + "finished_at": "2025-01-15T10:30:00Z" + }, + { + "commit_oid": "b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3", + "branch_name": "feature/auth", + "status": "PENDING", + "occurrences_introduced": 0, + "occurrences_resolved": 0, + "occurrences_suppressed": 0, + "finished_at": null + } +] diff --git a/command/analysis/tests/golden_files/runs_list_response.json b/command/analysis/tests/golden_files/runs_list_response.json new file mode 100644 index 00000000..46883026 --- /dev/null +++ b/command/analysis/tests/golden_files/runs_list_response.json @@ -0,0 +1,38 @@ +{ + "repository": { + "analysisRuns": { + "edges": [ + { + "node": { + "runUid": "run-uid-001", + "commitOid": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2", + "branchName": "main", + "status": "SUCCESS", + "createdAt": "2025-01-15T10:00:00Z", + "finishedAt": "2025-01-15T10:30:00Z", + "summary": { + "occurrencesIntroduced": 3, + "occurrencesResolved": 1, + "occurrencesSuppressed": 0 + } + } + }, + { + "node": { + "runUid": "run-uid-002", + "commitOid": "b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3", + "branchName": "feature/auth", + "status": "PENDING", + "createdAt": "2025-01-16T08:00:00Z", + "finishedAt": null, + "summary": { + "occurrencesIntroduced": 0, + "occurrencesResolved": 0, + "occurrencesSuppressed": 0 + } + } + } + ] + } + } +} diff --git a/command/auth/auth.go b/command/auth/auth.go index 182aaccf..3299dc64 100644 --- a/command/auth/auth.go +++ b/command/auth/auth.go @@ -5,14 +5,13 @@ import ( "github.com/deepsourcelabs/cli/command/auth/login" "github.com/deepsourcelabs/cli/command/auth/logout" - "github.com/deepsourcelabs/cli/command/auth/refresh" "github.com/deepsourcelabs/cli/command/auth/status" ) // Options holds the metadata. 
type Options struct{} -// NewCmdAuth handles the auth command which has various sub-commands like `login`, `logout`, `refresh` and `status` +// NewCmdAuth handles the auth command which has various sub-commands like `login`, `logout` and `status` func NewCmdAuth() *cobra.Command { cmd := &cobra.Command{ Use: "auth", @@ -20,7 +19,6 @@ func NewCmdAuth() *cobra.Command { } cmd.AddCommand(login.NewCmdLogin()) cmd.AddCommand(logout.NewCmdLogout()) - cmd.AddCommand(refresh.NewCmdRefresh()) cmd.AddCommand(status.NewCmdStatus()) return cmd } diff --git a/command/auth/login/login.go b/command/auth/login/login.go index fb8d1b1b..7d3cc326 100644 --- a/command/auth/login/login.go +++ b/command/auth/login/login.go @@ -5,11 +5,14 @@ import ( "github.com/MakeNowJust/heredoc" "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/utils" + "github.com/deepsourcelabs/cli/internal/cli/args" + "github.com/deepsourcelabs/cli/internal/cli/prompt" + "github.com/deepsourcelabs/cli/internal/cli/style" + authsvc "github.com/deepsourcelabs/cli/internal/services/auth" "github.com/spf13/cobra" ) -var accountTypes = []string{"DeepSource (deepsource.io)", "DeepSource Enterprise"} +var accountTypes = []string{"DeepSource (deepsource.com)", "Enterprise Server"} // LoginOptions hold the metadata related to login operation type LoginOptions struct { @@ -34,7 +37,7 @@ func NewCmdLogin() *cobra.Command { Use %[3]s to authenticate with a specific DeepSource instance, for example: %[4]s - `, utils.Yellow("--with-token"), utils.Cyan("deepsource auth login --with-token dsp_abcd"), utils.Yellow("--hostname"), utils.Cyan("deepsource auth login --hostname my_instance")) + `, style.Yellow("--with-token"), style.Cyan("deepsource auth login --with-token dsp_abcd"), style.Yellow("--hostname"), style.Cyan("deepsource auth login --hostname my_instance")) opts := LoginOptions{ AuthTimedOut: false, @@ -47,7 +50,7 @@ func NewCmdLogin() *cobra.Command { Use: "login", Short: "Log in to DeepSource using Command Line Interface", Long: doc, - Args: utils.NoArgs, + Args: args.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { return opts.Run() }, @@ -63,8 +66,12 @@ func NewCmdLogin() *cobra.Command { // Run executes the auth command and starts the login flow if not already authenticated func (opts *LoginOptions) Run() (err error) { - // Fetch config - cfg, _ := config.GetConfig() + svc := authsvc.NewService(config.DefaultManager()) + // Fetch config (errors are non-fatal: a zero config just means "not logged in") + cfg, err := svc.LoadConfig() + if err != nil { + cfg = &config.CLIConfig{} + } opts.User = cfg.User opts.TokenExpired = cfg.IsExpired() @@ -77,7 +84,7 @@ func (opts *LoginOptions) Run() (err error) { } // Checking if the user passed a hostname. If yes, storing it in the config - // Else using the default hostname (deepsource.io) + // Else using the default hostname (deepsource.com) if opts.HostName != "" { cfg.Host = opts.HostName } else { @@ -92,7 +99,7 @@ func (opts *LoginOptions) Run() (err error) { if !opts.TokenExpired { // The user is already logged in, confirm re-authentication. msg := fmt.Sprintf("You're already logged into DeepSource as %s. Do you want to re-authenticate?", opts.User) - response, err := utils.ConfirmFromUser(msg, "") + response, err := prompt.ConfirmFromUser(msg, "") if err != nil { return fmt.Errorf("Error in fetching response. 
Please try again.") } @@ -105,12 +112,12 @@ func (opts *LoginOptions) Run() (err error) { // If PAT is passed, start the login flow through PAT if opts.PAT != "" { - return opts.startPATLoginFlow(cfg, opts.PAT) + return opts.startPATLoginFlow(svc, cfg, opts.PAT) } // Condition 2 // `startLoginFlow` implements the authentication flow for the CLI - return opts.startLoginFlow(cfg) + return opts.startLoginFlow(svc, cfg) } func (opts *LoginOptions) handleInteractiveLogin() error { @@ -121,13 +128,13 @@ func (opts *LoginOptions) handleInteractiveLogin() error { hostPromptHelpText := "The hostname of the DeepSource instance to authenticate with" // Display prompt to user - loginType, err := utils.SelectFromOptions(loginPromptMessage, loginPromptHelpText, accountTypes) + loginType, err := prompt.SelectFromOptions(loginPromptMessage, loginPromptHelpText, accountTypes) if err != nil { return err } // Prompt the user for hostname only in the case of on-premise - if loginType == "DeepSource Enterprise" { - opts.HostName, err = utils.GetSingleLineInput(hostPromptMessage, hostPromptHelpText) + if loginType == "Enterprise Server" { + opts.HostName, err = prompt.GetSingleLineInput(hostPromptMessage, hostPromptHelpText) if err != nil { return err } diff --git a/command/auth/login/login_flow.go b/command/auth/login/login_flow.go index 87f7ff75..1a58732f 100644 --- a/command/auth/login/login_flow.go +++ b/command/auth/login/login_flow.go @@ -9,77 +9,66 @@ import ( "github.com/cli/browser" "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" "github.com/deepsourcelabs/cli/deepsource/auth" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + authsvc "github.com/deepsourcelabs/cli/internal/services/auth" "github.com/fatih/color" ) // Starts the login flow for the CLI -func (opts *LoginOptions) startLoginFlow(cfg *config.CLIConfig) error { +func (opts *LoginOptions) startLoginFlow(svc *authsvc.Service, cfg *config.CLIConfig) error { // Register the device and get a device code through the response ctx := context.Background() - deviceRegistrationResponse, err := registerDevice(ctx) + deviceRegistrationResponse, err := registerDevice(ctx, svc, cfg) if err != nil { return err } - // Print the user code and the permission to open browser at verificationURI - c := color.New(color.FgCyan, color.Bold) - c.Printf("Please copy your one-time code: %s\n", deviceRegistrationResponse.UserCode) - c.Printf("Press enter to open deepsource.io in your browser...") - fmt.Scanln() - - // Having received the user code, open the browser at verificationURIComplete + // Open the browser for authentication err = browser.OpenURL(deviceRegistrationResponse.VerificationURIComplete) if err != nil { - return err + c := color.New(color.FgCyan, color.Bold) + c.Printf("Open this URL in your browser to authenticate:\n") + } else { + fmt.Println("Opened the authentication page in your browser.") + c := color.New(color.FgCyan, color.Bold) + c.Printf("If the browser didn't open, visit this URL to authenticate:\n") } + fmt.Println(deviceRegistrationResponse.VerificationURIComplete) + fmt.Println() + fmt.Println("Waiting for authentication") - // Fetch the PAT using the device registration resonse + // Fetch the PAT by polling the server var tokenData *auth.PAT - tokenData, opts.AuthTimedOut, err = fetchPAT(ctx, deviceRegistrationResponse) + tokenData, opts.AuthTimedOut, err = fetchPAT(ctx, deviceRegistrationResponse, svc, cfg) if err != nil { return err } - // Check if it was a success poll or the Auth timed 
out if opts.AuthTimedOut { - return fmt.Errorf("Authentication timed out") + return clierrors.NewCLIError(clierrors.ErrAuthRequired, "Authentication timed out. Please try again", nil) } - // Storing the useful data for future reference and usage - // in a global config object (Cfg) + // Store auth data in config cfg.User = tokenData.User.Email cfg.Token = tokenData.Token cfg.SetTokenExpiry(tokenData.Expiry) - // Having stored the data in the global Cfg object, write it into the config file present in the local filesystem - err = cfg.WriteFile() + err = svc.SaveConfig(cfg) if err != nil { - return fmt.Errorf("Error in writing authentication data to a file. Exiting...") + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Failed to save authentication data", err) } + + c := color.New(color.FgGreen, color.Bold) + c.Printf("Logged in as %s\n", tokenData.User.Email) return nil } -func registerDevice(ctx context.Context) (*auth.Device, error) { - // Fetching DeepSource client in order to interact with SDK - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return nil, err - } - - // Send a mutation to register device and get the device code - res, err := deepsource.RegisterDevice(ctx) - if err != nil { - return nil, err - } - return res, nil +func registerDevice(ctx context.Context, svc *authsvc.Service, cfg *config.CLIConfig) (*auth.Device, error) { + return svc.RegisterDevice(ctx, cfg) } -func fetchPAT(ctx context.Context, deviceRegistrationData *auth.Device) (*auth.PAT, bool, error) { +func fetchPAT(ctx context.Context, deviceRegistrationData *auth.Device, svc *authsvc.Service, cfg *config.CLIConfig) (*auth.PAT, bool, error) { var tokenData *auth.PAT var err error defaultUserName := "user" @@ -103,15 +92,6 @@ func fetchPAT(ctx context.Context, deviceRegistrationData *auth.Device) (*auth.P } userDescription := fmt.Sprintf("CLI PAT for %s@%s", userName, hostName) - // Fetching DeepSource client in order to interact with SDK - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return nil, authTimedOut, err - } - // Keep polling the mutation at a certain interval till the expiry timeperiod ticker := time.NewTicker(time.Duration(deviceRegistrationData.Interval) * time.Second) pollStartTime := time.Now() @@ -119,7 +99,7 @@ func fetchPAT(ctx context.Context, deviceRegistrationData *auth.Device) (*auth.P // Polling for fetching PAT func() { for range ticker.C { - tokenData, err = deepsource.Login(ctx, deviceRegistrationData.Code, userDescription) + tokenData, err = svc.RequestPAT(ctx, cfg, deviceRegistrationData.Code, userDescription) if err == nil { authTimedOut = false return diff --git a/command/auth/login/pat_login_flow.go b/command/auth/login/pat_login_flow.go index edefaf8d..3d0b7bc4 100644 --- a/command/auth/login/pat_login_flow.go +++ b/command/auth/login/pat_login_flow.go @@ -4,15 +4,16 @@ import ( "fmt" "github.com/deepsourcelabs/cli/config" + authsvc "github.com/deepsourcelabs/cli/internal/services/auth" ) // Starts the login flow for the CLI (using PAT) -func (opts *LoginOptions) startPATLoginFlow(cfg *config.CLIConfig, token string) error { +func (opts *LoginOptions) startPATLoginFlow(svc *authsvc.Service, cfg *config.CLIConfig, token string) error { // set personal access token (PAT) cfg.Token = token // Having stored the data in the global Cfg object, write it into the config file present in the local 
filesystem - err := cfg.WriteFile() + err := svc.SaveConfig(cfg) if err != nil { return fmt.Errorf("Error in writing authentication data to a file. Exiting...") } diff --git a/command/auth/logout/logout.go b/command/auth/logout/logout.go index 6d9e3e82..878f7936 100644 --- a/command/auth/logout/logout.go +++ b/command/auth/logout/logout.go @@ -2,10 +2,12 @@ package logout import ( "errors" - "fmt" "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/utils" + "github.com/deepsourcelabs/cli/internal/cli/args" + "github.com/deepsourcelabs/cli/internal/cli/prompt" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + authsvc "github.com/deepsourcelabs/cli/internal/services/auth" "github.com/pterm/pterm" "github.com/spf13/cobra" ) @@ -17,7 +19,7 @@ func NewCmdLogout() *cobra.Command { cmd := &cobra.Command{ Use: "logout", Short: "Logout of your active DeepSource account", - Args: utils.NoArgs, + Args: args.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { opts := LogoutOptions{} return opts.Run() @@ -27,10 +29,11 @@ func NewCmdLogout() *cobra.Command { } func (opts *LogoutOptions) Run() error { + svc := authsvc.NewService(config.DefaultManager()) // Fetch config - cfg, err := config.GetConfig() + cfg, err := svc.LoadConfig() if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) } // Checking if the user has authenticated / logged in or not if cfg.Token == "" { @@ -39,18 +42,17 @@ func (opts *LogoutOptions) Run() error { // Confirm from the user if they want to logout logoutConfirmationMsg := "Are you sure you want to log out of DeepSource account?" - response, err := utils.ConfirmFromUser(logoutConfirmationMsg, "") + response, err := prompt.ConfirmFromUser(logoutConfirmationMsg, "") if err != nil { return err } // If response is true, delete the config file => logged out the user if response { - err := cfg.Delete() - if err != nil { + if err := svc.DeleteConfig(); err != nil { return err } } - pterm.Info.Println("Logged out from DeepSource (deepsource.io)") + pterm.Println("Logged out from DeepSource (deepsource.com)") return nil } diff --git a/command/auth/refresh/refresh.go b/command/auth/refresh/refresh.go deleted file mode 100644 index b71592d3..00000000 --- a/command/auth/refresh/refresh.go +++ /dev/null @@ -1,81 +0,0 @@ -package refresh - -import ( - "context" - "errors" - "fmt" - - "github.com/MakeNowJust/heredoc" - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/utils" - "github.com/pterm/pterm" - "github.com/spf13/cobra" -) - -type RefreshOptions struct{} - -// NewCmdRefresh handles the refreshing of authentication credentials -func NewCmdRefresh() *cobra.Command { - doc := heredoc.Docf(` - Refresh stored authentication credentials. - - Authentication credentials expire after a certain amount of time. 
- - To renew the authentication credentials, use %[1]s - `, utils.Yellow("deepsource auth refresh")) - - opts := RefreshOptions{} - - cmd := &cobra.Command{ - Use: "refresh", - Short: "Refresh stored authentication credentials", - Long: doc, - Args: utils.NoArgs, - RunE: func(cmd *cobra.Command, args []string) error { - return opts.Run() - }, - } - return cmd -} - -func (opts *RefreshOptions) Run() error { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - // Checking if the user has authenticated / logged in or not - if cfg.Token == "" { - return errors.New("You are not logged into DeepSource. Run \"deepsource auth login\" to authenticate.") - } - - // Fetching DS Client - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - ctx := context.Background() - // Use the SDK to fetch the new auth data - refreshedConfigData, err := deepsource.RefreshAuthCreds(ctx, cfg.Token) - if err != nil { - return err - } - - // Convert incoming config into the local CLI config format - cfg.User = refreshedConfigData.User.Email - cfg.Token = refreshedConfigData.Token - cfg.SetTokenExpiry(refreshedConfigData.Expiry) - - // Having formatted the data, write it to the config file - err = cfg.WriteFile() - if err != nil { - fmt.Println("Error in writing authentication data to a file. Exiting...") - return err - } - pterm.Info.Println("Authentication successfully refreshed.") - return nil -} diff --git a/command/auth/status/status.go b/command/auth/status/status.go index 40af9ce4..39862755 100644 --- a/command/auth/status/status.go +++ b/command/auth/status/status.go @@ -2,32 +2,41 @@ package status import ( "errors" - "fmt" "github.com/MakeNowJust/heredoc" + "github.com/deepsourcelabs/cli/command/cmddeps" "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/utils" + "github.com/deepsourcelabs/cli/internal/cli/args" + "github.com/deepsourcelabs/cli/internal/cli/style" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + authsvc "github.com/deepsourcelabs/cli/internal/services/auth" "github.com/pterm/pterm" "github.com/spf13/cobra" ) -type AuthStatusOptions struct{} +type AuthStatusOptions struct { + deps *cmddeps.Deps +} // NewCmdStatus handles the fetching of authentication status of CLI func NewCmdStatus() *cobra.Command { + return NewCmdStatusWithDeps(nil) +} + +func NewCmdStatusWithDeps(deps *cmddeps.Deps) *cobra.Command { doc := heredoc.Docf(` View the authentication status. 
To check the authentication status, use %[1]s - `, utils.Cyan("deepsource auth status")) + `, style.Cyan("deepsource auth status")) cmd := &cobra.Command{ Use: "status", Short: "View the authentication status", Long: doc, - Args: utils.NoArgs, + Args: args.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { - opts := AuthStatusOptions{} + opts := AuthStatusOptions{deps: deps} return opts.Run() }, } @@ -35,10 +44,17 @@ func NewCmdStatus() *cobra.Command { } func (opts *AuthStatusOptions) Run() error { + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + svc := authsvc.NewService(cfgMgr) // Fetch config - cfg, err := config.GetConfig() + cfg, err := svc.LoadConfig() if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) } // Checking if the user has authenticated / logged in or not if cfg.Token == "" { @@ -47,9 +63,9 @@ func (opts *AuthStatusOptions) Run() error { // Check if the token has already expired if !cfg.IsExpired() { - pterm.Info.Printf("Logged in to DeepSource as %s.\n", cfg.User) + pterm.Printf("Logged in to DeepSource as %s.\n", cfg.User) } else { - pterm.Info.Println("The authentication has expired. Run \"deepsource auth refresh\" to refresh the credentials.") + pterm.Println("The authentication has expired. Run \"deepsource auth login\" to re-authenticate.") } return nil } diff --git a/command/auth/status/tests/auth_status_test.go b/command/auth/status/tests/auth_status_test.go new file mode 100644 index 00000000..b2e5c9f7 --- /dev/null +++ b/command/auth/status/tests/auth_status_test.go @@ -0,0 +1,82 @@ +package tests + +import ( + "strings" + "testing" + "time" + + "github.com/deepsourcelabs/cli/command/cmddeps" + statusCmd "github.com/deepsourcelabs/cli/command/auth/status" + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/secrets" +) + +func createConfigManager(t *testing.T, token, host, user string, expiry time.Time) *config.Manager { + t.Helper() + tmpDir := t.TempDir() + fs := adapters.NewOSFileSystem() + mgr := config.NewManagerWithSecrets(fs, func() (string, error) { + return tmpDir, nil + }, secrets.NoopStore{}, "") + + cfg := &config.CLIConfig{ + Token: token, + Host: host, + User: user, + TokenExpiresIn: expiry, + } + if err := mgr.Write(cfg); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + return mgr +} + +func TestAuthStatusLoggedIn(t *testing.T) { + cfgMgr := createConfigManager(t, "test-token", "deepsource.com", "user@example.com", time.Now().Add(24*time.Hour)) + + deps := &cmddeps.Deps{ + ConfigMgr: cfgMgr, + } + + cmd := statusCmd.NewCmdStatusWithDeps(deps) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + // pterm output goes to real stdout in this case, but at least we verify no error +} + +func TestAuthStatusNotLoggedIn(t *testing.T) { + cfgMgr := createConfigManager(t, "", "deepsource.com", "", time.Time{}) + + deps := &cmddeps.Deps{ + ConfigMgr: cfgMgr, + } + + cmd := statusCmd.NewCmdStatusWithDeps(deps) + + err := cmd.Execute() + if err == nil { + t.Fatal("expected error for not logged in, got nil") + } + + if !strings.Contains(err.Error(), "not logged into DeepSource") { + t.Errorf("expected not-logged-in error message, got: %s", err.Error()) + } +} + 
+func TestAuthStatusExpired(t *testing.T) { + cfgMgr := createConfigManager(t, "test-token", "deepsource.com", "user@example.com", time.Now().Add(-24*time.Hour)) + + deps := &cmddeps.Deps{ + ConfigMgr: cfgMgr, + } + + cmd := statusCmd.NewCmdStatusWithDeps(deps) + + // Expired token should not error, just prints a message + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } +} diff --git a/command/cmddeps/deps.go b/command/cmddeps/deps.go new file mode 100644 index 00000000..f6c8387b --- /dev/null +++ b/command/cmddeps/deps.go @@ -0,0 +1,19 @@ +package cmddeps + +import ( + "io" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + reposvc "github.com/deepsourcelabs/cli/internal/services/repo" +) + +// Deps holds injectable dependencies for commands, enabling testability. +// When nil or when individual fields are nil, commands fall back to +// production defaults. +type Deps struct { + Client *deepsource.Client + ConfigMgr *config.Manager + Stdout io.Writer + RepoService *reposvc.Service +} diff --git a/command/config/config.go b/command/config/config.go deleted file mode 100644 index 7aea4391..00000000 --- a/command/config/config.go +++ /dev/null @@ -1,22 +0,0 @@ -package config - -import ( - "github.com/deepsourcelabs/cli/command/config/generate" - "github.com/deepsourcelabs/cli/command/config/validate" - "github.com/spf13/cobra" -) - -// Options holds the metadata. -type Options struct{} - -// NewCmdVersion returns the current version of cli being used -func NewCmdConfig() *cobra.Command { - cmd := &cobra.Command{ - Use: "config ", - Short: "Generate and Validate DeepSource config", - } - cmd.AddCommand(generate.NewCmdConfigGenerate()) - cmd.AddCommand(validate.NewCmdValidate()) - - return cmd -} diff --git a/command/config/generate/analyzers_input.go b/command/config/generate/analyzers_input.go deleted file mode 100644 index 5a2cbbe1..00000000 --- a/command/config/generate/analyzers_input.go +++ /dev/null @@ -1,165 +0,0 @@ -package generate - -import ( - "log" - - "github.com/Jeffail/gabs/v2" - "github.com/deepsourcelabs/cli/utils" -) - -// Struct to hold the data regarding the compulsary meta fields as required by analyzers -// Also, the userinput for that field -type AnalyzerMetadata struct { - FieldName string - Type string - Title string - Description string - Options []string - UserInput string -} - -// ========== -// Analyzers Input Prompt -// ========== -func (o *Options) collectAnalyzerInput() (err error) { - // Extracting languages and tools being used in the project for Analyzers - analyzerPromptMsg := "Which languages/tools does your project use?" - analyzerPromptHelpText := "Analyzers will find issues in your code. Add an analyzer by selecting a language you've written your code in." 
- - o.ActivatedAnalyzers, err = utils.SelectFromMultipleOptions(analyzerPromptMsg, analyzerPromptHelpText, utils.AnalyzersData.AnalyzerNames) - if err != nil { - return err - } - - // Extract the compulsary analyzer meta for analyzers - err = o.extractRequiredAnalyzerMetaFields() - if err != nil { - return err - } - - return nil -} - -// Checks if the field is present in the array containing list of `optional_required` -// analyzer meta fields -func isContains(requiredFieldsList []string, field string) bool { - for _, v := range requiredFieldsList { - if field == v { - return true - } - } - return false -} - -// Uses the `survey` prompt API to gather user input and store in `Options` struct -// `Options` struct is later used for config generation -func (o *Options) inputAnalyzerMeta(requiredFieldsData map[string][]AnalyzerMetadata) (err error) { - // Iterate over the map and fetch the input for the fields from the user - for analyzer, metaFields := range requiredFieldsData { - for i := 0; i < len(metaFields); i++ { - switch metaFields[i].Type { - case "boolean": - metaFields[i].UserInput = "true" - res, err := utils.ConfirmFromUser(metaFields[i].Title, metaFields[i].Description) - if err != nil { - return err - } - if !res { - metaFields[i].UserInput = "false" - } - case "enum": - metaFields[i].UserInput, err = utils.SelectFromOptions(metaFields[i].Title, metaFields[i].Description, metaFields[i].Options) - if err != nil { - return err - } - default: - metaFields[i].UserInput, err = utils.GetSingleLineInput(metaFields[i].Title, metaFields[i].Description) - if err != nil { - return err - } - } - } - requiredFieldsData[analyzer] = metaFields - } - o.AnalyzerMetaMap = requiredFieldsData - return nil -} - -// Extracts the fields that are compulsary according to the meta schema and require input -func populateMetadata(optionalFields []string, jsonParsed *gabs.Container) []AnalyzerMetadata { - requiredFieldsData := make([]AnalyzerMetadata, 0) - - // Iterate through the properties using the parsed json (jsonParsed) and extract the data of the - // required analyzer meta fields - for key, child := range jsonParsed.Search("properties").ChildrenMap() { - if !isContains(optionalFields, key) { - continue - } - propertyJSON, err := gabs.ParseJSON(child.Bytes()) - if err != nil { - log.Printf("Error occured while parsing analyzer meta property: %v\n", err) - continue - } - - individualFieldRequiredData := AnalyzerMetadata{ - FieldName: key, - Type: propertyJSON.Search("type").Data().(string), - Title: propertyJSON.Search("title").Data().(string), - Description: propertyJSON.Search("description").Data().(string), - } - - // Check for enum property - for _, child := range propertyJSON.Search("enum").Children() { - individualFieldRequiredData.Options = append(individualFieldRequiredData.Options, child.Data().(string)) - individualFieldRequiredData.Type = "enum" - } - - // Check for items property - itemsPath := propertyJSON.Path("items") - itemsJSON, _ := gabs.ParseJSON(itemsPath.Bytes()) - for _, child := range itemsJSON.Search("enum").Children() { - individualFieldRequiredData.Options = append(individualFieldRequiredData.Options, child.Data().(string)) - individualFieldRequiredData.Type = "enum" - } - requiredFieldsData = append(requiredFieldsData, individualFieldRequiredData) - } - return requiredFieldsData -} - -// The primary function to parse the API response of meta schema and filter out the `optional_required` fields -// Calls helper functions (mentioned above) to perform the required meta data 
extraction -// and handling prompt for inputting these fields -func (o *Options) extractRequiredAnalyzerMetaFields() error { - var optionalFields []string - var requiredMetaData []AnalyzerMetadata - analyzerFieldsData := make(map[string][]AnalyzerMetadata) - - // Extract `optional_required` fields of analyzer meta of selected analyzers - for _, activatedAnalyzer := range o.ActivatedAnalyzers { - analyzerShortcode := utils.AnalyzersData.AnalyzersMap[activatedAnalyzer] - // Assigning optional fields to nil before checking for an analyzer - optionalFields = nil - requiredMetaData = nil - - analyzerMeta := utils.AnalyzersData.AnalyzersMetaMap[analyzerShortcode] - // Parse the analyzer meta of the analyzer using `gabs` - jsonParsed, err := gabs.ParseJSON([]byte(analyzerMeta)) - if err != nil { - log.Printf("Error occured while parsing meta for %s analyzer.\n", activatedAnalyzer) - return err - } - - // Search for "optional_required" fields in the meta-schema - for _, child := range jsonParsed.Search("optional_required").Children() { - optionalFields = append(optionalFields, child.Data().(string)) - } - // Move on to next analyzer if no "optional_required" fields found - if optionalFields == nil { - continue - } - // Extract the the data to be input for all the required analyzer meta properties - requiredMetaData = populateMetadata(optionalFields, jsonParsed) - analyzerFieldsData[activatedAnalyzer] = requiredMetaData - } - return o.inputAnalyzerMeta(analyzerFieldsData) -} diff --git a/command/config/generate/constants.go b/command/config/generate/constants.go deleted file mode 100644 index 96475763..00000000 --- a/command/config/generate/constants.go +++ /dev/null @@ -1,3 +0,0 @@ -package generate - -const DEEPSOURCE_TOML_VERSION = 1 diff --git a/command/config/generate/generate.go b/command/config/generate/generate.go deleted file mode 100644 index 4c719dbe..00000000 --- a/command/config/generate/generate.go +++ /dev/null @@ -1,157 +0,0 @@ -package generate - -import ( - "bytes" - "fmt" - "os" - "path/filepath" - - "github.com/MakeNowJust/heredoc" - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/utils" - "github.com/fatih/color" - toml "github.com/pelletier/go-toml" - "github.com/spf13/cobra" -) - -// Options holds the metadata. -type Options struct { - ActivatedAnalyzers []string // Analyzers activated by user - AnalyzerMetaMap map[string][]AnalyzerMetadata - ActivatedTransformers []string // Transformers activated by the user - ExcludePatterns []string - TestPatterns []string - GeneratedConfig string -} - -// NewCmdConfigGenerate handles the generation of DeepSource config based on user inputs -func NewCmdConfigGenerate() *cobra.Command { - o := Options{} - - home, _ := os.UserHomeDir() - doc := heredoc.Docf(` - Generate config for the DeepSource CLI. - - Configs are stored in: %[1]s - `, utils.Cyan(filepath.Join(home, "deepsource", "config.toml"))) - - cmd := &cobra.Command{ - Use: "generate", - Short: "Generate config for DeepSource", - Long: doc, - Args: utils.NoArgs, - RunE: func(cmd *cobra.Command, args []string) error { - return o.Run() - }, - } - return cmd -} - -// Run executes the command. 
-func (o *Options) Run() error { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - err = cfg.VerifyAuthentication() - if err != nil { - return err - } - - // Step 1: Collect user input - err = o.collectUserInput() - if err != nil { - fmt.Println("\nError occured while collecting input.Exiting...") - return err - } - - // Step 2: Generates config based on user input - err = o.generateDeepSourceConfig() - if err != nil { - fmt.Println("\nError occured while generating config from input.Exiting...") - return err - } - - // Step 3: Write the generated config to a file - err = o.writeConfigToFile() - if err != nil { - fmt.Println("\nError while writing config to project directory. Exiting...") - return err - } - - // Step 4: If everything is successfull, print the success message - cwd, _ := os.Getwd() - c := color.New(color.FgGreen) - successOutput := fmt.Sprintf("\nSuccessfully generated DeepSource config file at %s/.deepsource.toml", cwd) - c.Println(successOutput) - - return nil -} - -// Generates DeepSource config based on the inputs from the user in Options struct -func (o *Options) generateDeepSourceConfig() error { - // Copying version, exclude_patterns and test_patterns into the DSConfig based structure - config := DSConfig{ - Version: DEEPSOURCE_TOML_VERSION, - ExcludePatterns: o.ExcludePatterns, - TestPatterns: o.TestPatterns, - } - - // Copying activated analyzers from Options struct to DSConfig based "config" struct - for _, analyzer := range o.ActivatedAnalyzers { - // Configuring the analyzer meta data - metaMap := make(map[string]interface{}) - if o.AnalyzerMetaMap[analyzer] != nil { - for _, meta := range o.AnalyzerMetaMap[analyzer] { - metaMap[meta.FieldName] = meta.UserInput - } - } - - activatedAnalyzerData := Analyzer{ - Name: utils.AnalyzersData.AnalyzersMap[analyzer], - Enabled: true, - } - if len(metaMap) != 0 { - activatedAnalyzerData.Meta = metaMap - } - config.Analyzers = append(config.Analyzers, activatedAnalyzerData) - } - - // Copying activated transformers from Options struct to DSConfig based "config" struct - for _, transformer := range o.ActivatedTransformers { - config.Transformers = append(config.Transformers, Transformer{ - Name: utils.TransformersData.TransformerMap[transformer], - Enabled: true, - }) - } - - // Encoding the DSConfig based "config" struct to TOML - // and storing in GeneratedConfig of Options struct - var buf bytes.Buffer - err := toml.NewEncoder(&buf).Order(toml.OrderPreserve).Encode(config) - if err != nil { - return err - } - // Convert the TOML encoded buffer to string - o.GeneratedConfig = buf.String() - return nil -} - -// Writes the generated TOML config into a file -func (o *Options) writeConfigToFile() error { - // Creating file - cwd, _ := os.Getwd() - f, err := os.Create(filepath.Join(cwd, ".deepsource.toml")) - if err != nil { - return err - } - defer f.Close() - - // Writing the string to the file - _, writeError := f.WriteString(o.GeneratedConfig) - if writeError != nil { - return writeError - } - return nil -} diff --git a/command/config/generate/generic_input.go b/command/config/generate/generic_input.go deleted file mode 100644 index bb147319..00000000 --- a/command/config/generate/generic_input.go +++ /dev/null @@ -1,116 +0,0 @@ -package generate - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/AlecAivazis/survey/v2" - "github.com/deepsourcelabs/cli/utils" -) - -// ========== -// Exclude Patterns Input Prompt -// 
========== -func (o *Options) collectExcludePatterns() error { - excludePatternsMsg := "Would you like to add any exclude patterns?" - helpMsg := "Glob patterns of files that should not be analyzed such as auto-generated files, migrations, compatibility files." - - // Confirm from the user if they want to add an exclude pattern - response, err := utils.ConfirmFromUser(excludePatternsMsg, helpMsg) - if err != nil { - return err - } - - // If yes, keep entering patterns until they input n/N - if response { - err := o.inputFilePatterns("exclude", "Select exclude pattern", helpMsg) - if err != nil { - return err - } - } - return nil -} - -// ========== -// Test Patterns Input Prompt -// ========== -func (o *Options) collectTestPatterns() error { - testPatternsMsg := "Would you like to add any test patterns?" - helpMsg := "Glob patterns of the test files. This helps us reduce false positives." - - // Confirm from the user (y/N) if he/she wants to add test patterns - response, err := utils.ConfirmFromUser(testPatternsMsg, helpMsg) - if err != nil { - return err - } - - // If yes, keep entering patterns until they input n/N - if response { - err := o.inputFilePatterns("test", "Select test pattern", helpMsg) - if err != nil { - return err - } - } - return nil -} - -// Single utility function to help in inputting test as well as exclude patterns -// Keeps asking user for pattern and then confirms if they want to add more patterns -// Exits when user enters No (n/N) -func (o *Options) inputFilePatterns(field, msg, helpMsg string) error { - // Infinite loop to keep running until user wants to stop inputting - for { - var filePattern string - - // Input the pattern - filePatternsPrompt := &survey.Input{ - Renderer: survey.Renderer{}, - Message: msg, - Default: "", - Help: helpMsg, - Suggest: getMatchingFiles, - } - err := survey.AskOne(filePatternsPrompt, &filePattern) - if err != nil { - return err - } - - // Having taken the input of exclude_patterns/test_pattern, append it to the Options struct - if field == "test" { - o.TestPatterns = append(o.TestPatterns, filePattern) - } else { - o.ExcludePatterns = append(o.ExcludePatterns, filePattern) - } - - // Confirm from the user if the user wants to add more patterns - // Iterating this until user says no - // Here field contains : "test"/"exclude" depending upon the invoking - confirmationMsg := fmt.Sprintf("Add more %s patterns?", field) - response, err := utils.ConfirmFromUser(confirmationMsg, "") - if err != nil { - return err - } - if !response { - break - } - } - return nil -} - -// Receives a filepath and returns matching dirs and files -// Used for autocompleting input of "exclude_patterns" and "test_patterns" -func getMatchingFiles(path string) []string { - // Geting matching dirs and files using glob - files, _ := filepath.Glob(path + "*") - cwd, _ := os.Getwd() - - // Iterating over files and appending "/" to directories - for index, file := range files { - fileInfo, _ := os.Stat(filepath.Join(cwd, file)) - if fileInfo.IsDir() { - files[index] = files[index] + "/" - } - } - return files -} diff --git a/command/config/generate/input.go b/command/config/generate/input.go deleted file mode 100644 index abbd45d1..00000000 --- a/command/config/generate/input.go +++ /dev/null @@ -1,50 +0,0 @@ -package generate - -import ( - "context" - - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/utils" -) - -// Responsible for collecting user input for generating DeepSource config -func (o 
*Options) collectUserInput() error { - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - ctx := context.Background() - - // Get the list of analyzers and transformers supported by DeepSource - err = utils.GetAnalyzersAndTransformersData(ctx, *deepsource) - if err != nil { - return err - } - - // Get input for analyzers to be activated - err = o.collectAnalyzerInput() - if err != nil { - return err - } - - err = o.collectTransformersInput() - if err != nil { - return err - } - - err = o.collectExcludePatterns() - if err != nil { - return err - } - - err = o.collectTestPatterns() - if err != nil { - return err - } - - return nil -} diff --git a/command/config/generate/transformers_input.go b/command/config/generate/transformers_input.go deleted file mode 100644 index 6e5a2468..00000000 --- a/command/config/generate/transformers_input.go +++ /dev/null @@ -1,18 +0,0 @@ -package generate - -import "github.com/deepsourcelabs/cli/utils" - -// ========== -// Transformers Input Prompt -// ========== -func (o *Options) collectTransformersInput() (err error) { - transformerPromptMsg := "Would you like to activate any Transformers for any languages?" - transformerPromptHelpText := "DeepSource Transformers automatically help to achieve auto-formatting of code. Add a transformer by selecting the code formatting tool of your choice." - - o.ActivatedTransformers, err = utils.SelectFromMultipleOptions(transformerPromptMsg, transformerPromptHelpText, utils.TransformersData.TransformerNames) - if err != nil { - return err - } - - return nil -} diff --git a/command/config/generate/types.go b/command/config/generate/types.go deleted file mode 100644 index a60d7a19..00000000 --- a/command/config/generate/types.go +++ /dev/null @@ -1,24 +0,0 @@ -package generate - -// DSConfig is the struct for .deepsource.toml file -type Analyzer struct { - Name string `toml:"name,omitempty" json:"name,omitempty"` - RuntimeVersion string `toml:"runtime_version,omitempty" json:"runtime_version,omitempty"` - Enabled bool `toml:"enabled" json:"enabled"` - DependencyFilePaths []string `toml:"dependency_file_paths,omitempty" json:"dependency_file_paths,omitempty"` - Meta interface{} `toml:"meta,omitempty" json:"meta,omitempty"` - Thresholds interface{} `toml:"thresholds,omitempty" json:"thresholds,omitempty"` -} - -type Transformer struct { - Name string `toml:"name" json:"name"` - Enabled bool `toml:"enabled" json:"enabled"` -} - -type DSConfig struct { - Version int `toml:"version" json:"version"` - ExcludePatterns []string `toml:"exclude_patterns" json:"exclude_patterns,omitempty"` - TestPatterns []string `toml:"test_patterns" json:"test_patterns,omitempty"` - Analyzers []Analyzer `toml:"analyzers,omitempty" json:"analyzers,omitempty"` - Transformers []Transformer `toml:"transformers,omitempty" json:"transformers,omitempty"` -} diff --git a/command/config/validate/validate.go b/command/config/validate/validate.go deleted file mode 100644 index 500cb50a..00000000 --- a/command/config/validate/validate.go +++ /dev/null @@ -1,201 +0,0 @@ -package validate - -import ( - "context" - "errors" - "fmt" - "io/ioutil" - "os" - "os/exec" - "path/filepath" - "strconv" - "strings" - - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/configvalidator" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/utils" - "github.com/pterm/pterm" - "github.com/spf13/cobra" -) - -// Options holds the 
metadata. -type Options struct{} - -// NewCmdValidate handles the validation of the DeepSource config (.deepsource.toml) -// Internally it uses the package `configvalidator` to validate the config -func NewCmdValidate() *cobra.Command { - o := Options{} - cmd := &cobra.Command{ - Use: "validate", - Short: "Validate DeepSource config", - Args: utils.NoArgs, - RunE: func(cmd *cobra.Command, args []string) error { - return o.Run() - }, - } - return cmd -} - -// Run executes the command. -func (o *Options) Run() error { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - err = cfg.VerifyAuthentication() - if err != nil { - return err - } - - // Just an info - pterm.Info.Println("DeepSource config (.deepsource.toml) is mostly present in the root directory of the project.") - fmt.Println() - - // Extract the path of DeepSource config - configPath, err := extractDSConfigPath() - if err != nil { - return err - } - - // Read the config in the form of string and send it - content, err := ioutil.ReadFile(configPath) - if err != nil { - return errors.New("Error occured while reading DeepSource config file. Exiting...") - } - - // Fetch the client - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - ctx := context.Background() - // Fetch the list of supported analyzers and transformers' data - // using the SDK - err = utils.GetAnalyzersAndTransformersData(ctx, *deepsource) - if err != nil { - return err - } - - // Create an instance of ConfigValidator struct - var validator configvalidator.ConfigValidator - // Send the config contents to get validated - var result configvalidator.Result = validator.ValidateConfig(content) - - // Checking for all types of errors (due to viper/valid errors/no errors) - // and handling them - if result.ConfigReadError { - // handle printing viper error here - printViperError(content, result.Errors) - } else if !result.Valid { - // handle printing other errors here - printConfigErrors(result.Errors) - } else { - printValidConfig() - } - - return nil -} - -// Extracts the path of DeepSource config (.deepsource.toml) in the user repo -// Checks in the current working directory as well as the root directory -// of the project -func extractDSConfigPath() (string, error) { - var configPath string - - // Get current working directory of user from where this command is run - cwd, err := os.Getwd() - if err != nil { - return "", errors.New("Error occured while fetching current working directory. Exiting...") - } - - // Form the full path of cwd to search for .deepsource.toml - configPath = filepath.Join(cwd, ".deepsource.toml") - - // Check if there is a deepsource.toml file here - if _, err = os.Stat(configPath); err != nil { - // Since, no .deepsource.toml in the cwd, - // fetching the top level directory - output, err := exec.Command("git", "rev-parse", "--show-toplevel").Output() - if err != nil { - return "", err - } - - // Removing trailing null characters - path := strings.TrimRight(string(output), "\000\n") - - // Check if the config exists on this path - if _, err = os.Stat(filepath.Join(path, ".deepsource.toml")); err != nil { - return "", errors.New("Error occured while looking for DeepSource config file. 
Exiting...") - } else { - // If found, use this as configpath - configPath = filepath.Join(path, "/.deepsource.toml") - } - } - return configPath, nil -} - -// Handles printing the output when viper fails to read TOML file due to bad syntax -func printViperError(fileContent []byte, errors []string) { - var errorString string - var errorLine int - - // Parsing viper error output and finding at which line bad syntax is present in - // DeepSource config TOML file - for _, error := range errors { - stripString1 := strings.Split(error, ": ") - errorString = stripString1[2] - errorLine, _ = strconv.Atoi(strings.Trim(strings.Split(stripString1[1], ", ")[0], "(")) - } - - // Read .deepsource.toml line by line and store in a var - lineText := strings.Split(string(fileContent), "\n") - fileLength := len(lineText) - - // Print error message - pterm.Error.WithShowLineNumber(false).Printf("Error while reading config : %s\n", errorString) - pterm.Println() - - // Preparing codeframe to show exactly at which line bad syntax is present in TOML file - if errorLine > 2 && errorLine+2 <= fileLength { - for i := errorLine - 2; i <= errorLine+2; i++ { - if i == errorLine { - errorStr := "" - if i >= 10 { - errorStr = fmt.Sprintf("> %d | %s", i, lineText[i-1]) - } else { - errorStr = fmt.Sprintf("> %d | %s", i, lineText[i-1]) - } - pterm.NewStyle(pterm.FgLightRed).Println(errorStr) - } else { - errorStr := "" - if i >= 10 { - errorStr = fmt.Sprintf(" %d | %s", i, lineText[i-1]) - } else { - errorStr = fmt.Sprintf(" %d | %s", i, lineText[i-1]) - } - pterm.NewStyle(pterm.FgLightYellow).Println(errorStr) - - } - } - } else { - errorStr := fmt.Sprintf("> %d | %s", errorLine, lineText[errorLine-1]) - pterm.NewStyle(pterm.FgLightRed).Println(errorStr) - } -} - -// Handles printing the errors in the DeepSource config (.deepsource.toml) -func printConfigErrors(errors []string) { - for _, error := range errors { - pterm.Error.WithShowLineNumber(false).Println(error) - } -} - -// Handles printing the valid config output -func printValidConfig() { - pterm.Success.Println("Config Valid") -} diff --git a/command/issues/issues.go b/command/issues/issues.go index 2d383b5d..447814d7 100644 --- a/command/issues/issues.go +++ b/command/issues/issues.go @@ -1,20 +1,582 @@ package issues import ( - "github.com/spf13/cobra" + "context" + "encoding/json" + "fmt" + "io" + "os" + "strings" - "github.com/deepsourcelabs/cli/command/issues/list" + "github.com/MakeNowJust/heredoc" + "github.com/deepsourcelabs/cli/command/cmddeps" + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/deepsource/issues" + "github.com/deepsourcelabs/cli/internal/cli/completion" + "github.com/deepsourcelabs/cli/internal/cli/style" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + "github.com/deepsourcelabs/cli/internal/vcs" + "github.com/pterm/pterm" + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" ) -// Options holds the metadata. 
-type Options struct{} +type IssuesOptions struct { + RepoArg string + LimitArg int + OutputFormat string + OutputFile string + Verbose bool + AnalyzerFilters []string + CategoryFilters []string + SeverityFilters []string + CodeFilters []string + PathFilters []string + SourceFilters []string + CommitOid string + PRNumber int + repoSlug string + issues []issues.Issue + deps *cmddeps.Deps +} + +func (opts *IssuesOptions) stdout() io.Writer { + if opts.deps != nil && opts.deps.Stdout != nil { + return opts.deps.Stdout + } + return os.Stdout +} -// NewCmdVersion returns the current version of cli being used func NewCmdIssues() *cobra.Command { + return NewCmdIssuesWithDeps(nil) +} + +func NewCmdIssuesWithDeps(deps *cmddeps.Deps) *cobra.Command { + opts := IssuesOptions{ + LimitArg: 30, + OutputFormat: "pretty", + deps: deps, + } + + doc := heredoc.Docf(` + View issues in a repository. + + Lists issues from the default branch by default: + %[1]s + + Scope to a specific commit or pull request: + %[2]s + %[3]s + + Output as a table or structured format: + %[4]s + %[5]s + `, + style.Cyan("deepsource issues"), + style.Cyan("deepsource issues --commit abc123f"), + style.Cyan("deepsource issues --pr 123"), + style.Cyan("deepsource issues --output table"), + style.Cyan("deepsource issues --output json"), + ) + cmd := &cobra.Command{ - Use: "issues", - Short: "Show the list of issues in a file in a repository", + Use: "issues [flags]", + Short: "View issues in a repository", + Long: doc, + RunE: func(cmd *cobra.Command, args []string) error { + return opts.Run(cmd.Context()) + }, } - cmd.AddCommand(list.NewCmdIssuesList()) + + // --repo, -r flag + cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Repository to list issues for (owner/name)") + + // --limit, -l flag + cmd.Flags().IntVarP(&opts.LimitArg, "limit", "l", 30, "Maximum number of issues to fetch") + + // --output, -o flag + cmd.Flags().StringVarP(&opts.OutputFormat, "output", "o", "pretty", "Output format: pretty, table, json, yaml") + + // --output-file flag + cmd.Flags().StringVar(&opts.OutputFile, "output-file", "", "Write output to a file instead of stdout") + + // --verbose, -v flag + cmd.Flags().BoolVarP(&opts.Verbose, "verbose", "v", false, "Show issue code, analyzer, and description") + + // Scoping flags + cmd.Flags().StringVar(&opts.CommitOid, "commit", "", "Scope to a specific analysis run by commit OID") + cmd.Flags().IntVar(&opts.PRNumber, "pr", 0, "Scope to a specific pull request by number") + + // Filter flags + cmd.Flags().StringSliceVar(&opts.AnalyzerFilters, "analyzer", nil, "Filter by analyzer shortcode (e.g. python,go)") + cmd.Flags().StringSliceVar(&opts.CategoryFilters, "category", nil, "Filter by category (e.g. security,bug-risk)") + cmd.Flags().StringSliceVar(&opts.SeverityFilters, "severity", nil, "Filter by severity (e.g. critical,major)") + cmd.Flags().StringSliceVar(&opts.CodeFilters, "code", nil, "Filter by issue code (e.g. GO-R1005)") + cmd.Flags().StringSliceVar(&opts.PathFilters, "path", nil, "Filter by path substring (e.g. 
cmd/,internal/)") + cmd.Flags().StringSliceVar(&opts.SourceFilters, "source", nil, "Filter by source (static, ai)") + + // Completions + _ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "pretty\tPretty-printed grouped output", + "table\tTable output", + "json\tJSON output", + "yaml\tYAML output", + }, cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("category", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "anti-pattern", + "bug-risk", + "performance", + "security", + "coverage", + "typecheck", + "style", + "documentation", + }, cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("severity", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"critical", "major", "minor"}, cobra.ShellCompDirectiveNoFileComp + }) + + // Mutual exclusivity + cmd.MarkFlagsMutuallyExclusive("commit", "pr") + return cmd } + +func (opts *IssuesOptions) Run(ctx context.Context) error { + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + cfg, err := cfgMgr.Load() + if err != nil { + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) + } + if err := cfg.VerifyAuthentication(); err != nil { + return err + } + + remote, err := vcs.ResolveRemote(opts.RepoArg) + if err != nil { + return err + } + opts.repoSlug = remote.Owner + "/" + remote.RepoName + + var client *deepsource.Client + if opts.deps != nil && opts.deps.Client != nil { + client = opts.deps.Client + } else { + client, err = deepsource.New(deepsource.ClientOpts{ + Token: cfg.Token, + HostName: cfg.Host, + OnTokenRefreshed: cfgMgr.TokenRefreshCallback(), + }) + if err != nil { + return err + } + } + + var issuesList []issues.Issue + switch { + case opts.CommitOid != "": + issuesList, err = client.GetRunIssuesFlat(ctx, opts.CommitOid, opts.LimitArg) + case opts.PRNumber > 0: + issuesList, err = client.GetPRIssues(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.PRNumber, opts.LimitArg) + default: + issuesList, err = client.GetIssues(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.LimitArg) + } + if err != nil { + return err + } + + issuesList = opts.filterIssues(issuesList) + opts.issues = issuesList + + switch opts.OutputFormat { + case "json": + return opts.outputJSON() + case "yaml": + return opts.outputYAML() + case "table": + return opts.outputTable() + default: + return opts.outputHuman() + } +} + +// --- Filters --- + +func (opts *IssuesOptions) hasFilters() bool { + return len(opts.AnalyzerFilters) > 0 || + len(opts.CategoryFilters) > 0 || + len(opts.SeverityFilters) > 0 || + len(opts.CodeFilters) > 0 || + len(opts.PathFilters) > 0 || + len(opts.SourceFilters) > 0 +} + +func (opts *IssuesOptions) filterIssues(issuesList []issues.Issue) []issues.Issue { + if !opts.hasFilters() { + return issuesList + } + + analyzerSet := makeStringSet(opts.AnalyzerFilters) + categorySet := makeStringSet(opts.CategoryFilters) + severitySet := 
makeStringSet(opts.SeverityFilters) + codeSet := makeStringSet(opts.CodeFilters) + sourceSet := makeStringSet(opts.SourceFilters) + pathFilters := makeLowerStrings(opts.PathFilters) + + filtered := make([]issues.Issue, 0, len(issuesList)) + for _, issue := range issuesList { + if len(analyzerSet) > 0 && !setContainsFold(analyzerSet, issue.Analyzer.Shortcode) { + continue + } + if len(categorySet) > 0 && !setContainsFold(categorySet, issue.IssueCategory) { + continue + } + if len(severitySet) > 0 && !setContainsFold(severitySet, issue.IssueSeverity) { + continue + } + if len(codeSet) > 0 && !setContainsFold(codeSet, issue.IssueCode) { + continue + } + if len(sourceSet) > 0 && !setContainsFold(sourceSet, issue.IssueSource) { + continue + } + if len(pathFilters) > 0 && !matchesPathFilters(issue.Location.Path, pathFilters) { + continue + } + filtered = append(filtered, issue) + } + + return filtered +} + +func makeStringSet(values []string) map[string]struct{} { + set := make(map[string]struct{}) + for _, value := range values { + normalized := strings.ToLower(strings.TrimSpace(value)) + if normalized == "" { + continue + } + normalized = strings.ReplaceAll(normalized, "-", "_") + set[normalized] = struct{}{} + } + return set +} + +func makeLowerStrings(values []string) []string { + normalized := make([]string, 0, len(values)) + for _, value := range values { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + continue + } + normalized = append(normalized, strings.ToLower(trimmed)) + } + return normalized +} + +func setContainsFold(set map[string]struct{}, value string) bool { + normalized := strings.ToLower(strings.TrimSpace(value)) + normalized = strings.ReplaceAll(normalized, "-", "_") + _, ok := set[normalized] + return ok +} + +func matchesPathFilters(path string, filters []string) bool { + if path == "" { + return false + } + lowerPath := strings.ToLower(path) + for _, filter := range filters { + if filter == "" { + continue + } + if strings.Contains(lowerPath, filter) { + return true + } + } + return false +} + +// --- Human output --- + +func (opts *IssuesOptions) outputHuman() error { + if len(opts.issues) == 0 { + if opts.hasFilters() { + pterm.Info.Println("No issues matched the provided filters.") + } else { + pterm.Info.Println("No issues found.") + } + return nil + } + + cwd, _ := os.Getwd() + + order := []string{"CRITICAL", "MAJOR", "MINOR"} + groups := make(map[string][]issues.Issue) + for _, issue := range opts.issues { + sev := strings.ToUpper(issue.IssueSeverity) + groups[sev] = append(groups[sev], issue) + } + + for _, sev := range order { + group, ok := groups[sev] + if !ok || len(group) == 0 { + continue + } + + header := fmt.Sprintf("%s (%d)", humanizeSeverity(sev), len(group)) + fmt.Println(colorSeverity(sev, header)) + fmt.Println() + + for _, issue := range group { + location := formatLocation(issue, cwd) + category := humanizeCategory(issue.IssueCategory) + fmt.Printf(" %s: %s (%s)\n", category, issue.IssueText, location) + + if opts.Verbose { + analyzer := analyzerDisplayName(issue.Analyzer) + fmt.Printf(" %s · %s\n", issue.IssueCode, analyzer) + if issue.Description != "" { + fmt.Printf(" %s\n", issue.Description) + } + fmt.Println() + } + } + + if !opts.Verbose { + fmt.Println() + } + } + + fmt.Printf("Showing %d issue(s) in %s", len(opts.issues), opts.repoSlug) + switch { + case opts.CommitOid != "": + fmt.Printf(" from commit %s\n", opts.CommitOid) + case opts.PRNumber > 0: + fmt.Printf(" from PR #%d\n", opts.PRNumber) + default: + fmt.Println(" from 
default branch") + } + return nil +} + +// --- Table output --- + +func (opts *IssuesOptions) outputTable() error { + if len(opts.issues) == 0 { + if opts.hasFilters() { + pterm.Info.Println("No issues matched the provided filters.") + } else { + pterm.Info.Println("No issues found.") + } + return nil + } + + showSource := opts.CommitOid != "" || opts.PRNumber > 0 + + var header []string + if showSource { + header = []string{"Location", "Source", "Analyzer", "Code", "Title", "Category", "Severity"} + } else { + header = []string{"Location", "Analyzer", "Code", "Title", "Category", "Severity"} + } + data := [][]string{header} + + cwd, _ := os.Getwd() + + for _, issue := range opts.issues { + location := formatLocation(issue, cwd) + severity := formatSeverity(issue.IssueSeverity) + category := humanizeCategory(issue.IssueCategory) + analyzer := analyzerDisplayName(issue.Analyzer) + + if showSource { + data = append(data, []string{ + location, + issue.IssueSource, + analyzer, + issue.IssueCode, + issue.IssueText, + category, + severity, + }) + } else { + data = append(data, []string{ + location, + analyzer, + issue.IssueCode, + issue.IssueText, + category, + severity, + }) + } + } + + pterm.DefaultTable.WithHasHeader().WithData(data).Render() + pterm.Printf("\nShowing %d issue(s)\n", len(opts.issues)) + + return nil +} + +// --- JSON/YAML output --- + +type IssueJSON struct { + Path string `json:"path" yaml:"path"` + BeginLine int `json:"begin_line" yaml:"begin_line"` + EndLine int `json:"end_line" yaml:"end_line"` + IssueCode string `json:"issue_code" yaml:"issue_code"` + Title string `json:"title" yaml:"title"` + Category string `json:"category" yaml:"category"` + Severity string `json:"severity" yaml:"severity"` + Source string `json:"source" yaml:"source"` + Analyzer string `json:"analyzer" yaml:"analyzer"` +} + +func (opts *IssuesOptions) toJSONIssues() []IssueJSON { + result := make([]IssueJSON, 0, len(opts.issues)) + for _, issue := range opts.issues { + result = append(result, IssueJSON{ + Path: issue.Location.Path, + BeginLine: issue.Location.Position.BeginLine, + EndLine: issue.Location.Position.EndLine, + IssueCode: issue.IssueCode, + Title: issue.IssueText, + Category: issue.IssueCategory, + Severity: issue.IssueSeverity, + Source: issue.IssueSource, + Analyzer: issue.Analyzer.Shortcode, + }) + } + return result +} + +func (opts *IssuesOptions) outputJSON() error { + data, err := json.MarshalIndent(opts.toJSONIssues(), "", " ") + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format JSON output", err) + } + return opts.writeOutput(data, true) +} + +func (opts *IssuesOptions) outputYAML() error { + data, err := yaml.Marshal(opts.toJSONIssues()) + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format YAML output", err) + } + return opts.writeOutput(data, false) +} + +func (opts *IssuesOptions) writeOutput(data []byte, trailingNewline bool) error { + if opts.OutputFile == "" { + w := opts.stdout() + if trailingNewline { + fmt.Fprintln(w, string(data)) + } else { + fmt.Fprint(w, string(data)) + } + return nil + } + + if err := os.WriteFile(opts.OutputFile, data, 0644); err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to write output file", err) + } + pterm.Printf("Saved issues to %s!\n", opts.OutputFile) + return nil +} + +// --- Formatting helpers --- + +func humanizeSeverity(s string) string { + switch strings.ToUpper(s) { + case "CRITICAL": + return "Critical" + case "MAJOR": + return 
"Major" + case "MINOR": + return "Minor" + default: + return s + } +} + +func humanizeCategory(c string) string { + switch strings.ToUpper(c) { + case "BUG_RISK": + return "Bug Risk" + case "ANTI_PATTERN": + return "Anti-pattern" + case "SECURITY": + return "Security" + case "PERFORMANCE": + return "Performance" + case "COVERAGE": + return "Coverage" + case "TYPECHECK": + return "Typecheck" + case "STYLE": + return "Style" + case "DOCUMENTATION": + return "Documentation" + default: + return c + } +} + +func analyzerDisplayName(meta issues.AnalyzerMeta) string { + if meta.Name != "" { + return meta.Name + } + return meta.Shortcode +} + +func colorSeverity(sev string, text string) string { + switch strings.ToUpper(sev) { + case "CRITICAL": + return pterm.Red(text) + case "MAJOR": + return pterm.LightRed(text) + case "MINOR": + return pterm.Yellow(text) + default: + return text + } +} + +func formatSeverity(severity string) string { + humanized := humanizeSeverity(severity) + switch strings.ToUpper(severity) { + case "CRITICAL": + return pterm.Red(humanized) + case "MAJOR": + return pterm.LightRed(humanized) + case "MINOR": + return pterm.Yellow(humanized) + default: + return humanized + } +} + +func formatLocation(issue issues.Issue, cwd string) string { + filePath := issue.Location.Path + if cwd != "" && strings.HasPrefix(filePath, cwd) { + filePath = strings.TrimPrefix(filePath, cwd+"/") + } + if issue.Location.Position.BeginLine == issue.Location.Position.EndLine { + return fmt.Sprintf("%s:%d", filePath, issue.Location.Position.BeginLine) + } + return fmt.Sprintf("%s:%d-%d", filePath, issue.Location.Position.BeginLine, issue.Location.Position.EndLine) +} + diff --git a/command/issues/list/list.go b/command/issues/list/list.go deleted file mode 100644 index fe89d117..00000000 --- a/command/issues/list/list.go +++ /dev/null @@ -1,292 +0,0 @@ -package list - -import ( - "context" - "encoding/csv" - "encoding/json" - "fmt" - "io/ioutil" - "os" - - "github.com/MakeNowJust/heredoc" - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/deepsource/issues" - "github.com/deepsourcelabs/cli/utils" - "github.com/pterm/pterm" - "github.com/spf13/cobra" -) - -const MAX_ISSUE_LIMIT = 100 - -type IssuesListOptions struct { - FileArg []string - RepoArg string - AnalyzerArg []string - LimitArg int - OutputFilenameArg string - JSONArg bool - CSVArg bool - SARIFArg bool - SelectedRemote *utils.RemoteData - issuesData []issues.Issue - ptermTable [][]string -} - -func NewCmdIssuesList() *cobra.Command { - opts := IssuesListOptions{ - FileArg: []string{""}, - RepoArg: "", - LimitArg: 30, - } - - doc := heredoc.Docf(` - List issues reported by DeepSource. 
- - To list issues for the current repository: - %[1]s - - To list issues for a specific repository, use the %[2]s flag: - %[3]s - - To list issues for a specific analyzer, use the %[4]s flag: - %[5]s - - To limit the number of issues reported, use the %[6]s flag: - %[7]s - - To export listed issues to a file, use the %[8]s flag: - %[9]s - - To export listed issues to a JSON file, use the %[10]s flag: - %[11]s - - To export listed issues to a CSV file, use the %[12]s flag: - %[13]s - - To export listed issues to a SARIF file, use the %[14]s flag: - %[15]s - `, utils.Cyan("deepsource issues list"), utils.Yellow("--repo"), utils.Cyan("deepsource issues list --repo repo_name"), utils.Yellow("--analyzer"), utils.Cyan("deepsource issues list --analyzer python"), utils.Yellow("--limit"), utils.Cyan("deepsource issues list --limit 100"), utils.Yellow("--output-file"), utils.Cyan("deepsource issues list --output-file file_name"), utils.Yellow("--json"), utils.Cyan("deepsource issues list --json --output-file example.json"), utils.Yellow("--csv"), utils.Cyan("deepsource issues list --csv --output-file example.csv"), utils.Yellow("--sarif"), utils.Cyan("deepsource issues list --sarif --output-file example.sarif")) - - cmd := &cobra.Command{ - Use: "list", - Short: "List issues reported by DeepSource", - Long: doc, - RunE: func(cmd *cobra.Command, args []string) error { - opts.FileArg = args - return opts.Run() - }, - } - - // --repo, -r flag - cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "List the issues of the specified repository") - - // --analyzer, -a flag - cmd.Flags().StringArrayVarP(&opts.AnalyzerArg, "analyzer", "a", nil, "List the issues for the specified analyzer") - - // --limit, -l flag - cmd.Flags().IntVarP(&opts.LimitArg, "limit", "l", 30, "Fetch the issues upto the specified limit") - - // --output-file, -o flag - cmd.Flags().StringVarP(&opts.OutputFilenameArg, "output-file", "o", "", "Output file to write the reported issues to") - - // --json flag - cmd.Flags().BoolVar(&opts.JSONArg, "json", false, "Output reported issues in JSON format") - - // --csv flag - cmd.Flags().BoolVar(&opts.CSVArg, "csv", false, "Output reported issues in CSV format") - - // --sarif flag - cmd.Flags().BoolVar(&opts.SARIFArg, "sarif", false, "Output reported issues in SARIF format") - - return cmd -} - -// Execute the command -func (opts *IssuesListOptions) Run() (err error) { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - err = cfg.VerifyAuthentication() - if err != nil { - return err - } - - // The current limit of querying issues at once is 100. - // If the limit passed by user is greater than 100, exit - // with an error message - if opts.LimitArg > MAX_ISSUE_LIMIT { - return fmt.Errorf("The maximum allowed limit to fetch issues is 100. 
Found %d", opts.LimitArg) - } - - // Get the remote repository URL for which issues have to be listed - opts.SelectedRemote, err = utils.ResolveRemote(opts.RepoArg) - if err != nil { - return err - } - - // Fetch the list of issues using SDK (deepsource package) based on user input - ctx := context.Background() - err = opts.getIssuesData(ctx) - if err != nil { - return err - } - - if opts.JSONArg { - opts.exportJSON(opts.OutputFilenameArg) - } else if opts.CSVArg { - opts.exportCSV(opts.OutputFilenameArg) - } else if opts.SARIFArg { - opts.exportSARIF(opts.OutputFilenameArg) - } else { - opts.showIssues() - } - - return nil -} - -// Gets the data about issues using the SDK based on the user input -// i.e for a single file or for the whole project -func (opts *IssuesListOptions) getIssuesData(ctx context.Context) (err error) { - // Get the deepsource client for using the issue fetching SDK to fetch the list of issues - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - - // Fetch list of issues for the whole project - opts.issuesData, err = deepsource.GetIssues(ctx, opts.SelectedRemote.Owner, opts.SelectedRemote.RepoName, opts.SelectedRemote.VCSProvider, opts.LimitArg) - if err != nil { - return err - } - - var filteredIssues []issues.Issue - - // Fetch issues for a certain FileArg (filepath) passed by the user - // Example: `deepsource issues list api/hello.py` - if len(opts.FileArg) != 0 { - var fetchedIssues []issues.Issue - for _, arg := range opts.FileArg { - // Filter issues for the valid directories/files - filteredIssues, err = filterIssuesByPath(arg, opts.issuesData) - if err != nil { - return err - } - fetchedIssues = append(fetchedIssues, filteredIssues...) - } - - // set fetched issues as issue data - opts.issuesData = getUniqueIssues(fetchedIssues) - } - - if len(opts.AnalyzerArg) != 0 { - var fetchedIssues []issues.Issue - - // Filter issues based on the analyzer shortcode - filteredIssues, err = filterIssuesByAnalyzer(opts.AnalyzerArg, opts.issuesData) - if err != nil { - return err - } - fetchedIssues = append(fetchedIssues, filteredIssues...) 
- - // set fetched issues as issue data - opts.issuesData = getUniqueIssues(fetchedIssues) - } - - return nil -} - -// Parses the SDK response and formats the data in the form of a TAB separated table -// and renders it using pterm -func (opts *IssuesListOptions) showIssues() { - // A 2d array to contain list of issues details arrays - opts.ptermTable = make([][]string, len(opts.issuesData)) - - // Curating the data and appending to the 2d array - for index, issue := range opts.issuesData { - filePath := issue.Location.Path - beginLine := issue.Location.Position.BeginLine - issueLocation := fmt.Sprintf("%s:%d", filePath, beginLine) - analyzerShortcode := issue.Analyzer.Shortcode - issueCategory := issue.IssueCategory - issueSeverity := issue.IssueSeverity - issueCode := issue.IssueCode - issueTitle := issue.IssueText - - opts.ptermTable[index] = []string{issueLocation, analyzerShortcode, issueCode, issueTitle, issueCategory, issueSeverity} - } - // Using pterm to render the list of list - pterm.DefaultTable.WithSeparator("\t").WithData(opts.ptermTable).Render() -} - -// Handles exporting issues as JSON -func (opts *IssuesListOptions) exportJSON(filename string) (err error) { - issueJSON := convertJSON(opts.issuesData) - data, err := json.MarshalIndent(issueJSON, "", " ") - if err != nil { - return err - } - - if filename == "" { - pterm.Println(string(data)) - return nil - } - - if err = ioutil.WriteFile(filename, data, 0o644); err != nil { - return err - } - - pterm.Info.Printf("Saved issues to %s!\n", filename) - return nil -} - -// Handles exporting issues as CSV -func (opts *IssuesListOptions) exportCSV(filename string) error { - records := convertCSV(opts.issuesData) - - if filename == "" { - // write to stdout - w := csv.NewWriter(os.Stdout) - return w.WriteAll(records) - } - - // create file - file, err := os.Create(filename) - if err != nil { - return err - } - - // write to file - w := csv.NewWriter(file) - w.WriteAll(records) - if err := w.Error(); err != nil { - return err - } - - pterm.Info.Printf("Saved issues to %s!\n", filename) - return nil -} - -// Handles exporting issues as a SARIF file -func (opts *IssuesListOptions) exportSARIF(filename string) (err error) { - report := convertSARIF(opts.issuesData) - if filename == "" { - err = report.PrettyWrite(os.Stdout) - if err != nil { - return err - } - return nil - } - - // write report to file - if err := report.WriteFile(filename); err != nil { - return err - } - pterm.Info.Printf("Saved issues to %s!\n", filename) - return nil -} diff --git a/command/issues/list/list_test.go b/command/issues/list/list_test.go deleted file mode 100644 index b3fe79dd..00000000 --- a/command/issues/list/list_test.go +++ /dev/null @@ -1,159 +0,0 @@ -package list - -import ( - "encoding/json" - "io/ioutil" - "os" - "reflect" - "strings" - "testing" - - "github.com/deepsourcelabs/cli/deepsource/issues" -) - -// Helper function to read issues from a file. 
-func ReadIssues(path string) []issues.Issue { - raw, _ := ioutil.ReadFile(path) - var fetchedIssues []issues.Issue - _ = json.Unmarshal(raw, &fetchedIssues) - - return fetchedIssues -} - -func TestListCSV(t *testing.T) { - issues_data := ReadIssues("./testdata/dummy/issues.json") - opts := IssuesListOptions{issuesData: issues_data} - opts.exportCSV("./testdata/exported.csv") - - // read exported and test CSV files - exported, _ := ioutil.ReadFile("./testdata/exported.csv") - test, _ := ioutil.ReadFile("./testdata/csv/test.csv") - - // trim carriage returns - got := strings.TrimSuffix(string(exported), "\n") - want := strings.TrimSuffix(string(test), "\n") - - // cleanup after test - _ = os.Remove("./testdata/exported.csv") - - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } -} - -func TestListJSON(t *testing.T) { - issues_data := ReadIssues("./testdata/dummy/issues.json") - opts := IssuesListOptions{issuesData: issues_data} - opts.exportJSON("./testdata/exported.json") - - // read exported and test JSON files - exported, _ := ioutil.ReadFile("./testdata/exported.json") - test, _ := ioutil.ReadFile("./testdata/json/test.json") - - // trim carriage returns - got := strings.TrimSuffix(string(exported), "\n") - want := strings.TrimSuffix(string(test), "\n") - - // cleanup after test - _ = os.Remove("./testdata/exported.json") - - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } -} - -func TestListSARIF(t *testing.T) { - t.Run("must work with single language repositories", func(t *testing.T) { - // export issues to SARIF - issues_data := ReadIssues("./testdata/dummy/issues.json") - - opts := IssuesListOptions{issuesData: issues_data} - opts.exportSARIF("./testdata/exported.sarif") - - // read exported and test SARIF files - exported, _ := ioutil.ReadFile("./testdata/exported.sarif") - test, _ := ioutil.ReadFile("./testdata/sarif/test.sarif") - - // trim carriage returns - got := strings.TrimSuffix(string(exported), "\n") - want := strings.TrimSuffix(string(test), "\n") - - // cleanup after test - _ = os.Remove("./testdata/exported.sarif") - - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) - - t.Run("must work with repositories containing multiple languages", func(t *testing.T) { - // export issues to SARIF - issues_data := ReadIssues("./testdata/dummy/issues_data_multi.json") - - opts := IssuesListOptions{issuesData: issues_data} - opts.exportSARIF("./testdata/exported_multi.sarif") - - // read exported and test SARIF files - exported, _ := ioutil.ReadFile("./testdata/exported_multi.sarif") - test, _ := ioutil.ReadFile("./testdata/sarif/test_multi.sarif") - - // trim carriage returns - got := strings.TrimSuffix(string(exported), "\n") - want := strings.TrimSuffix(string(test), "\n") - - // cleanup after test - _ = os.Remove("./testdata/exported_multi.sarif") - - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) -} - -func TestFilterIssuesByPath(t *testing.T) { - t.Run("must work with files", func(t *testing.T) { - issues_data := ReadIssues("./testdata/dummy/issues_data_multi.json") - issues_docker := ReadIssues("./testdata/dummy/issues_docker.json") - - got, _ := filterIssuesByPath("Dockerfile", issues_data) - want := issues_docker - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) - - t.Run("must work with directories", func(t *testing.T) { - issues_data := 
ReadIssues("./testdata/dummy/issues_data_multi.json") - issues_deepsource := ReadIssues("./testdata/dummy/issues_deepsource.json") - - got, _ := filterIssuesByPath("deepsource/", issues_data) - want := issues_deepsource - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) -} - -func TestFilterIssuesByAnalyzer(t *testing.T) { - t.Run("must work with a single analyzer", func(t *testing.T) { - issues_data := ReadIssues("./testdata/dummy/issues_data_multi.json") - issues_docker := ReadIssues("./testdata/dummy/issues_docker.json") - - got, _ := filterIssuesByAnalyzer([]string{"docker"}, issues_data) - want := issues_docker - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) - - t.Run("must work with multiple analyzers", func(t *testing.T) { - issues_data := ReadIssues("./testdata/dummy/issues_data_multi.json") - issues_multi_analyzers := ReadIssues("./testdata/dummy/issues_multiple_analyzers.json") - - got, _ := filterIssuesByAnalyzer([]string{"docker", "python"}, issues_data) - want := issues_multi_analyzers - if !reflect.DeepEqual(got, want) { - t.Errorf("got: %v; want: %v\n", got, want) - } - }) -} diff --git a/command/issues/list/testdata/csv/test.csv b/command/issues/list/testdata/csv/test.csv deleted file mode 100644 index 511ede7b..00000000 --- a/command/issues/list/testdata/csv/test.csv +++ /dev/null @@ -1,3 +0,0 @@ -analyzer,issue_code,issue_title,occurence_title,issue_category,path,begin_line,begin_column,end_line,end_column -go,RVV-B0013,Unused method receiver detected,Unused method receiver detected,,deepsource/transformers/queries/get_transformers.go,34,0,34,0 -go,RVV-B0013,Unused method receiver detected,Unused method receiver detected,,deepsource/transformers/queries/get_transformers.go,44,0,44,0 diff --git a/command/issues/list/testdata/dummy/issues.json b/command/issues/list/testdata/dummy/issues.json deleted file mode 100644 index f61aee25..00000000 --- a/command/issues/list/testdata/dummy/issues.json +++ /dev/null @@ -1 +0,0 @@ -[{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":34,"end":34}},"Analyzer":{"analyzer":"go"}},{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":44,"end":44}},"Analyzer":{"analyzer":"go"}}] \ No newline at end of file diff --git a/command/issues/list/testdata/dummy/issues_data_multi.json b/command/issues/list/testdata/dummy/issues_data_multi.json deleted file mode 100644 index 0c866f89..00000000 --- a/command/issues/list/testdata/dummy/issues_data_multi.json +++ /dev/null @@ -1 +0,0 @@ -[{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":34,"end":34}},"Analyzer":{"analyzer":"go"}},{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":44,"end":44}},"Analyzer":{"analyzer":"go"}},{"issue_title":"Use arguments JSON notation for CMD and ENTRYPOINT arguments","issue_code":"DOK-DL3025","location":{"path":"Dockerfile","position":{"begin":64,"end":64}},"Analyzer":{"analyzer":"docker"}},{"issue_title":"Imported name is not used anywhere in the 
module","issue_code":"PY-W2000","location":{"path":"python/demo.py","position":{"begin":1,"end":1}},"Analyzer":{"analyzer":"python"}}] \ No newline at end of file diff --git a/command/issues/list/testdata/dummy/issues_deepsource.json b/command/issues/list/testdata/dummy/issues_deepsource.json deleted file mode 100644 index f61aee25..00000000 --- a/command/issues/list/testdata/dummy/issues_deepsource.json +++ /dev/null @@ -1 +0,0 @@ -[{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":34,"end":34}},"Analyzer":{"analyzer":"go"}},{"issue_title":"Unused method receiver detected","issue_code":"RVV-B0013","location":{"path":"deepsource/transformers/queries/get_transformers.go","position":{"begin":44,"end":44}},"Analyzer":{"analyzer":"go"}}] \ No newline at end of file diff --git a/command/issues/list/testdata/dummy/issues_docker.json b/command/issues/list/testdata/dummy/issues_docker.json deleted file mode 100644 index 61642050..00000000 --- a/command/issues/list/testdata/dummy/issues_docker.json +++ /dev/null @@ -1 +0,0 @@ -[{"issue_title":"Use arguments JSON notation for CMD and ENTRYPOINT arguments","issue_code":"DOK-DL3025","location":{"path":"Dockerfile","position":{"begin":64,"end":64}},"Analyzer":{"analyzer":"docker"}}] \ No newline at end of file diff --git a/command/issues/list/testdata/dummy/issues_multiple_analyzers.json b/command/issues/list/testdata/dummy/issues_multiple_analyzers.json deleted file mode 100644 index b291856e..00000000 --- a/command/issues/list/testdata/dummy/issues_multiple_analyzers.json +++ /dev/null @@ -1 +0,0 @@ -[{"issue_title":"Use arguments JSON notation for CMD and ENTRYPOINT arguments","issue_code":"DOK-DL3025","location":{"path":"Dockerfile","position":{"begin":64,"end":64}},"Analyzer":{"analyzer":"docker"}},{"issue_title":"Imported name is not used anywhere in the module","issue_code":"PY-W2000","location":{"path":"python/demo.py","position":{"begin":1,"end":1}},"Analyzer":{"analyzer":"python"}}] diff --git a/command/issues/list/testdata/json/test.json b/command/issues/list/testdata/json/test.json deleted file mode 100644 index a9d0e845..00000000 --- a/command/issues/list/testdata/json/test.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "occurences": [ - { - "analyzer": "go", - "issue_code": "RVV-B0013", - "issue_title": "Unused method receiver detected", - "occurence_title": "Unused method receiver detected", - "issue_category": "", - "location": { - "path": "deepsource/transformers/queries/get_transformers.go", - "position": { - "begin": { - "line": 34, - "column": 0 - }, - "end": { - "line": 34, - "column": 0 - } - } - } - }, - { - "analyzer": "go", - "issue_code": "RVV-B0013", - "issue_title": "Unused method receiver detected", - "occurence_title": "Unused method receiver detected", - "issue_category": "", - "location": { - "path": "deepsource/transformers/queries/get_transformers.go", - "position": { - "begin": { - "line": 44, - "column": 0 - }, - "end": { - "line": 44, - "column": 0 - } - } - } - } - ], - "summary": { - "total_occurences": 2, - "unique_issues": 1 - } -} \ No newline at end of file diff --git a/command/issues/list/testdata/sarif/test.sarif b/command/issues/list/testdata/sarif/test.sarif deleted file mode 100644 index 98cdc3c3..00000000 --- a/command/issues/list/testdata/sarif/test.sarif +++ /dev/null @@ -1,75 +0,0 @@ -{ - "version": "2.1.0", - "$schema": "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", - "runs": [ - { - 
"tool": { - "driver": { - "informationUri": "https://deepsource.io/directory/analyzers/go", - "name": "DeepSource Go Analyzer", - "rules": [ - { - "id": "RVV-B0013", - "name": "Unused method receiver detected", - "shortDescription": null, - "fullDescription": { - "text": "" - }, - "helpUri": "https://deepsource.io/directory/analyzers/go/issues/RVV-B0013", - "properties": { - "category": "", - "recommended": "" - } - } - ] - } - }, - "results": [ - { - "ruleId": "RVV-B0013", - "ruleIndex": 0, - "kind": "fail", - "level": "error", - "message": { - "text": "Unused method receiver detected" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "deepsource/transformers/queries/get_transformers.go" - }, - "region": { - "startLine": 34, - "endLine": 34 - } - } - } - ] - }, - { - "ruleId": "RVV-B0013", - "ruleIndex": 1, - "kind": "fail", - "level": "error", - "message": { - "text": "Unused method receiver detected" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "deepsource/transformers/queries/get_transformers.go" - }, - "region": { - "startLine": 44, - "endLine": 44 - } - } - } - ] - } - ] - } - ] -} diff --git a/command/issues/list/testdata/sarif/test_multi.sarif b/command/issues/list/testdata/sarif/test_multi.sarif deleted file mode 100644 index 9e2de43b..00000000 --- a/command/issues/list/testdata/sarif/test_multi.sarif +++ /dev/null @@ -1,169 +0,0 @@ -{ - "version": "2.1.0", - "$schema": "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", - "runs": [ - { - "tool": { - "driver": { - "informationUri": "https://deepsource.io/directory/analyzers/go", - "name": "DeepSource Go Analyzer", - "rules": [ - { - "id": "RVV-B0013", - "name": "Unused method receiver detected", - "shortDescription": null, - "fullDescription": { - "text": "" - }, - "helpUri": "https://deepsource.io/directory/analyzers/go/issues/RVV-B0013", - "properties": { - "category": "", - "recommended": "" - } - } - ] - } - }, - "results": [ - { - "ruleId": "RVV-B0013", - "ruleIndex": 0, - "kind": "fail", - "level": "error", - "message": { - "text": "Unused method receiver detected" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "deepsource/transformers/queries/get_transformers.go" - }, - "region": { - "startLine": 34, - "endLine": 34 - } - } - } - ] - }, - { - "ruleId": "RVV-B0013", - "ruleIndex": 1, - "kind": "fail", - "level": "error", - "message": { - "text": "Unused method receiver detected" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "deepsource/transformers/queries/get_transformers.go" - }, - "region": { - "startLine": 44, - "endLine": 44 - } - } - } - ] - } - ] - }, - { - "tool": { - "driver": { - "informationUri": "https://deepsource.io/directory/analyzers/docker", - "name": "DeepSource Docker Analyzer", - "rules": [ - { - "id": "DOK-DL3025", - "name": "Use arguments JSON notation for CMD and ENTRYPOINT arguments", - "shortDescription": null, - "fullDescription": { - "text": "" - }, - "helpUri": "https://deepsource.io/directory/analyzers/docker/issues/DOK-DL3025", - "properties": { - "category": "", - "recommended": "" - } - } - ] - } - }, - "results": [ - { - "ruleId": "DOK-DL3025", - "ruleIndex": 0, - "kind": "fail", - "level": "error", - "message": { - "text": "Use arguments JSON notation for CMD and ENTRYPOINT arguments" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "Dockerfile" - }, - "region": { - "startLine": 64, - "endLine": 64 - } - } - } - ] 
- } - ] - }, - { - "tool": { - "driver": { - "informationUri": "https://deepsource.io/directory/analyzers/python", - "name": "DeepSource Python Analyzer", - "rules": [ - { - "id": "PY-W2000", - "name": "Imported name is not used anywhere in the module", - "shortDescription": null, - "fullDescription": { - "text": "" - }, - "helpUri": "https://deepsource.io/directory/analyzers/python/issues/PY-W2000", - "properties": { - "category": "", - "recommended": "" - } - } - ] - } - }, - "results": [ - { - "ruleId": "PY-W2000", - "ruleIndex": 0, - "kind": "fail", - "level": "error", - "message": { - "text": "Imported name is not used anywhere in the module" - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": "python/demo.py" - }, - "region": { - "startLine": 1, - "endLine": 1 - } - } - } - ] - } - ] - } - ] -} diff --git a/command/issues/list/types.go b/command/issues/list/types.go deleted file mode 100644 index 30d97f13..00000000 --- a/command/issues/list/types.go +++ /dev/null @@ -1,27 +0,0 @@ -package list - -// custom types for JSON marshaling - -type IssueJSON struct { - Analyzer string `json:"analyzer"` - IssueCode string `json:"issue_code"` - IssueTitle string `json:"issue_title"` - OccurenceTitle string `json:"occurence_title"` - IssueCategory string `json:"issue_category"` - Location LocationJSON `json:"location"` -} - -type LocationJSON struct { - Path string `json:"path"` // The filepath where the issue is reported - Position PositionJSON `json:"position"` // The position info where the issue is raised -} - -type PositionJSON struct { - Begin LineColumn `json:"begin"` // The line where the code covered under the issue starts - End LineColumn `json:"end"` // The line where the code covered under the issue starts -} - -type LineColumn struct { - Line int `json:"line"` - Column int `json:"column"` -} diff --git a/command/issues/list/utils.go b/command/issues/list/utils.go deleted file mode 100644 index cbdd86f3..00000000 --- a/command/issues/list/utils.go +++ /dev/null @@ -1,237 +0,0 @@ -package list - -import ( - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/deepsourcelabs/cli/deepsource/issues" - "github.com/owenrumney/go-sarif/v2/sarif" -) - -type ExportData struct { - Occurences []IssueJSON `json:"occurences"` - Summary Summary `json:"summary"` -} - -type Summary struct { - TotalOccurences int `json:"total_occurences"` - UniqueIssues int `json:"unique_issues"` -} - -/////////////////////// -// Filtering utilities -/////////////////////// - -// Filters issues based on a path, works for both directories and files -func filterIssuesByPath(path string, issuesData []issues.Issue) ([]issues.Issue, error) { - var filteredIssues []issues.Issue - for _, issue := range issuesData { - up := ".." + string(os.PathSeparator) - - // get relative path - rel, err := filepath.Rel(path, issue.Location.Path) - if err != nil { - return nil, err - } - - // handle files - if rel == "." { - filteredIssues = append(filteredIssues, issue) - } - - // check if the relative path has a parent directory - if !strings.HasPrefix(rel, up) && rel != ".." { - filteredIssues = append(filteredIssues, issue) - } - } - - return getUniqueIssues(filteredIssues), nil -} - -// Filters issues based on the analyzer shortcode. 
-func filterIssuesByAnalyzer(analyzer []string, issuesData []issues.Issue) ([]issues.Issue, error) { - var filteredIssues []issues.Issue - - // maintain a map of analyzer shortcodes - analyzerMap := make(map[string]bool) - for _, shortcode := range analyzer { - analyzerMap[shortcode] = true - } - - for _, issue := range issuesData { - if analyzerMap[issue.Analyzer.Shortcode] { - filteredIssues = append(filteredIssues, issue) - } - } - - return getUniqueIssues(filteredIssues), nil -} - -// Returns de-duplicated issues. -func getUniqueIssues(fetchedIssues []issues.Issue) []issues.Issue { - var uniqueIssues []issues.Issue - - // inUnique is a map which is used for checking whether an issue exists already or not - inUnique := make(map[issues.Issue]bool) - - for _, issue := range fetchedIssues { - // if the issue isn't present in inUnique, append the issue to uniqueIssues and update inUnique - if _, ok := inUnique[issue]; !ok { - inUnique[issue] = true - uniqueIssues = append(uniqueIssues, issue) - } - } - - return uniqueIssues -} - -/////////////////////// -// Conversion utilities -/////////////////////// - -// Converts issueData to a JSON-compatible struct -func convertJSON(issueData []issues.Issue) ExportData { - var occurences []IssueJSON - var issueExport ExportData - - set := make(map[string]string) - total_occurences := 0 - - for _, issue := range issueData { - issueNew := IssueJSON{ - Analyzer: issue.Analyzer.Shortcode, - IssueCode: issue.IssueCode, - IssueTitle: issue.IssueText, - OccurenceTitle: issue.IssueText, - IssueCategory: "", - Location: LocationJSON{ - Path: issue.Location.Path, - Position: PositionJSON{ - Begin: LineColumn{ - Line: issue.Location.Position.BeginLine, - Column: 0, - }, - End: LineColumn{ - Line: issue.Location.Position.EndLine, - Column: 0, - }, - }, - }, - } - - total_occurences += 1 - set[issue.IssueCode] = "" - - occurences = append(occurences, issueNew) - } - - issueExport.Occurences = occurences - issueExport.Summary.TotalOccurences = total_occurences - issueExport.Summary.UniqueIssues = len(set) - - return issueExport -} - -// Converts issueData to a CSV records -func convertCSV(issueData []issues.Issue) [][]string { - records := [][]string{{"analyzer", "issue_code", "issue_title", "occurence_title", "issue_category", "path", "begin_line", "begin_column", "end_line", "end_column"}} - - for _, issue := range issueData { - issueNew := []string{issue.Analyzer.Shortcode, issue.IssueCode, issue.IssueText, issue.IssueText, "", issue.Location.Path, fmt.Sprint(issue.Location.Position.BeginLine), "0", fmt.Sprint(issue.Location.Position.EndLine), "0"} - - records = append(records, issueNew) - } - - return records -} - -// Converts issueData to a SARIF report -func convertSARIF(issueData []issues.Issue) *sarif.Report { - report, err := sarif.New(sarif.Version210) - if err != nil { - return nil - } - - // use a map of shortcodes to append rules and results - type boolIndex struct { - exists bool - index int - } - shortcodes := make(map[string]boolIndex) - var runs []*sarif.Run - count := 0 - - // Adding the tools data to the SARIF report corresponding to the number of analyzers activated - for _, issue := range issueData { - if !shortcodes[issue.Analyzer.Shortcode].exists { - driverName := "DeepSource " + strings.Title(issue.Analyzer.Shortcode) + " Analyzer" - informationURI := "https://deepsource.io/directory/analyzers/" + string(issue.Analyzer.Shortcode) - - tool := sarif.Tool{ - Driver: &sarif.ToolComponent{ - Name: driverName, - InformationURI: &informationURI, - 
}, - } - - run := sarif.NewRun(tool) - runs = append(runs, run) - - // update boolIndex - shortcodes[issue.Analyzer.Shortcode] = boolIndex{exists: true, index: count} - count += 1 - } - } - - // use an index map for updating rule index value - idxMap := make(map[int]int) - - // Adding the results data for each analyzer in the report - for _, issue := range issueData { - // TODO: Fetch issue description from the API and populate here - textDescription := "" - fullDescription := sarif.MultiformatMessageString{ - Text: &textDescription, - } - - // check if the shortcode exists in the map - if shortcodes[issue.Analyzer.Shortcode].exists { - // fetch shortcode index - idx := shortcodes[issue.Analyzer.Shortcode].index - - // TODO: fetch category and recommended fields - pb := sarif.NewPropertyBag() - pb.Add("category", "") - pb.Add("recommended", "") - - helpURI := "https://deepsource.io/directory/analyzers/" + string(issue.Analyzer.Shortcode) + "/issues/" + string(issue.IssueCode) - - // add rule - runs[idx].AddRule(issue.IssueCode).WithName(issue.IssueText).WithFullDescription(&fullDescription).WithHelpURI(helpURI).WithProperties(pb.Properties) - - // add result - runs[idx].CreateResultForRule(issue.IssueCode).WithLevel("error").WithKind("fail").WithMessage(sarif.NewTextMessage( - issue.IssueText, - )).WithRuleIndex(idxMap[idx]).AddLocation( - sarif.NewLocationWithPhysicalLocation( - sarif.NewPhysicalLocation().WithArtifactLocation( - sarif.NewSimpleArtifactLocation(issue.Location.Path), - ).WithRegion( - sarif.NewSimpleRegion(issue.Location.Position.BeginLine, issue.Location.Position.EndLine), - ), - ), - ) - - idxMap[idx] += 1 - } - } - - // add all runs to report - for _, run := range runs { - report.AddRun(run) - } - - return report -} diff --git a/command/issues/tests/golden_files/commit_scope_output.json b/command/issues/tests/golden_files/commit_scope_output.json new file mode 100644 index 00000000..bd5ac43c --- /dev/null +++ b/command/issues/tests/golden_files/commit_scope_output.json @@ -0,0 +1,24 @@ +[ + { + "path": "cmd/server/main.go", + "begin_line": 42, + "end_line": 42, + "issue_code": "GO-R1005", + "title": "Error return value not checked", + "category": "BUG_RISK", + "severity": "MAJOR", + "source": "static", + "analyzer": "go" + }, + { + "path": "internal/handler/auth.go", + "begin_line": 15, + "end_line": 20, + "issue_code": "GO-S1001", + "title": "Potential SQL injection", + "category": "SECURITY", + "severity": "CRITICAL", + "source": "ai", + "analyzer": "go" + } +] diff --git a/command/issues/tests/golden_files/commit_scope_response.json b/command/issues/tests/golden_files/commit_scope_response.json new file mode 100644 index 00000000..957262e3 --- /dev/null +++ b/command/issues/tests/golden_files/commit_scope_response.json @@ -0,0 +1,46 @@ +{ + "run": { + "checks": { + "edges": [ + { + "node": { + "analyzer": { + "name": "Go", + "shortcode": "go" + }, + "issues": { + "edges": [ + { + "node": { + "source": "static", + "path": "cmd/server/main.go", + "beginLine": 42, + "endLine": 42, + "title": "Error return value not checked", + "shortcode": "GO-R1005", + "explanation": "Error return value of a function call is not checked", + "category": "BUG_RISK", + "severity": "MAJOR" + } + }, + { + "node": { + "source": "ai", + "path": "internal/handler/auth.go", + "beginLine": 15, + "endLine": 20, + "title": "Potential SQL injection", + "shortcode": "GO-S1001", + "explanation": "SQL query constructed with string concatenation", + "category": "SECURITY", + "severity": "CRITICAL" + } 
+ } + ] + } + } + } + ] + } + } +} diff --git a/command/issues/tests/golden_files/default_branch_output.json b/command/issues/tests/golden_files/default_branch_output.json new file mode 100644 index 00000000..5ec49282 --- /dev/null +++ b/command/issues/tests/golden_files/default_branch_output.json @@ -0,0 +1,35 @@ +[ + { + "path": "cmd/server/main.go", + "begin_line": 42, + "end_line": 42, + "issue_code": "GO-R1005", + "title": "Error return value not checked", + "category": "BUG_RISK", + "severity": "MAJOR", + "source": "", + "analyzer": "go" + }, + { + "path": "internal/handler/auth.go", + "begin_line": 15, + "end_line": 20, + "issue_code": "GO-S1001", + "title": "Potential SQL injection", + "category": "SECURITY", + "severity": "CRITICAL", + "source": "", + "analyzer": "go" + }, + { + "path": "pkg/utils/helpers.go", + "begin_line": 88, + "end_line": 88, + "issue_code": "GO-R1003", + "title": "Function too complex", + "category": "ANTI_PATTERN", + "severity": "MINOR", + "source": "", + "analyzer": "go" + } +] diff --git a/command/issues/tests/golden_files/default_branch_response.json b/command/issues/tests/golden_files/default_branch_response.json new file mode 100644 index 00000000..4ca01bad --- /dev/null +++ b/command/issues/tests/golden_files/default_branch_response.json @@ -0,0 +1,89 @@ +{ + "repository": { + "issues": { + "edges": [ + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "cmd/server/main.go", + "beginLine": 42, + "endLine": 42, + "issue": { + "title": "Error return value not checked", + "shortcode": "GO-R1005", + "shortDescription": "Error return value of a function call is not checked", + "category": "BUG_RISK", + "severity": "MAJOR", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + }, + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "internal/handler/auth.go", + "beginLine": 15, + "endLine": 20, + "issue": { + "title": "Potential SQL injection", + "shortcode": "GO-S1001", + "shortDescription": "SQL query constructed with string concatenation", + "category": "SECURITY", + "severity": "CRITICAL", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + }, + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "pkg/utils/helpers.go", + "beginLine": 88, + "endLine": 88, + "issue": { + "title": "Function too complex", + "shortcode": "GO-R1003", + "shortDescription": "Cyclomatic complexity exceeds threshold", + "category": "ANTI_PATTERN", + "severity": "MINOR", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + } + ] + } + } +} diff --git a/command/issues/tests/golden_files/filtered_severity_output.json b/command/issues/tests/golden_files/filtered_severity_output.json new file mode 100644 index 00000000..bc28e145 --- /dev/null +++ b/command/issues/tests/golden_files/filtered_severity_output.json @@ -0,0 +1,13 @@ +[ + { + "path": "internal/handler/auth.go", + "begin_line": 15, + "end_line": 20, + "issue_code": "GO-S1001", + "title": "Potential SQL injection", + "category": "SECURITY", + "severity": "CRITICAL", + "source": "", + "analyzer": "go" + } +] diff --git a/command/issues/tests/golden_files/filtered_severity_response.json b/command/issues/tests/golden_files/filtered_severity_response.json new file mode 100644 index 00000000..8e3d14c8 --- /dev/null +++ b/command/issues/tests/golden_files/filtered_severity_response.json @@ -0,0 +1,89 @@ +{ + 
"repository": { + "issues": { + "edges": [ + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "internal/handler/auth.go", + "beginLine": 15, + "endLine": 20, + "issue": { + "title": "Potential SQL injection", + "shortcode": "GO-S1001", + "shortDescription": "SQL query constructed with string concatenation", + "category": "SECURITY", + "severity": "CRITICAL", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + }, + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "cmd/server/main.go", + "beginLine": 42, + "endLine": 42, + "issue": { + "title": "Error return value not checked", + "shortcode": "GO-R1005", + "shortDescription": "Error return value of a function call is not checked", + "category": "BUG_RISK", + "severity": "MAJOR", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + }, + { + "node": { + "occurrences": { + "edges": [ + { + "node": { + "path": "pkg/utils/helpers.go", + "beginLine": 88, + "endLine": 88, + "issue": { + "title": "Function too complex", + "shortcode": "GO-R1003", + "shortDescription": "Cyclomatic complexity exceeds threshold", + "category": "ANTI_PATTERN", + "severity": "MINOR", + "isRecommended": false, + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + } + ] + } + } +} diff --git a/command/issues/tests/golden_files/pr_scope_output.json b/command/issues/tests/golden_files/pr_scope_output.json new file mode 100644 index 00000000..fe55294e --- /dev/null +++ b/command/issues/tests/golden_files/pr_scope_output.json @@ -0,0 +1,13 @@ +[ + { + "path": "pkg/api/handler.go", + "begin_line": 33, + "end_line": 33, + "issue_code": "GO-R1002", + "title": "Unused variable declared", + "category": "BUG_RISK", + "severity": "MINOR", + "source": "", + "analyzer": "go" + } +] diff --git a/command/issues/tests/golden_files/pr_scope_response.json b/command/issues/tests/golden_files/pr_scope_response.json new file mode 100644 index 00000000..dc772e6a --- /dev/null +++ b/command/issues/tests/golden_files/pr_scope_response.json @@ -0,0 +1,28 @@ +{ + "repository": { + "pullRequest": { + "issueOccurrences": { + "edges": [ + { + "node": { + "path": "pkg/api/handler.go", + "beginLine": 33, + "endLine": 33, + "title": "Unused variable declared", + "issue": { + "shortcode": "GO-R1002", + "shortDescription": "Variable declared but never used", + "category": "BUG_RISK", + "severity": "MINOR", + "analyzer": { + "name": "Go", + "shortcode": "go" + } + } + } + } + ] + } + } + } +} diff --git a/command/issues/tests/issues_test.go b/command/issues/tests/issues_test.go new file mode 100644 index 00000000..4c4a0525 --- /dev/null +++ b/command/issues/tests/issues_test.go @@ -0,0 +1,135 @@ +package tests + +import ( + "bytes" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/command/cmddeps" + issuesCmd "github.com/deepsourcelabs/cli/command/issues" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/internal/testutil" +) + +func goldenPath(name string) string { + _, callerFile, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(callerFile), "golden_files", name) +} + +func TestIssuesDefaultBranch(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "issues(first: $limit)": goldenPath("default_branch_response.json"), + }) + 
client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := issuesCmd.NewCmdIssuesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("default_branch_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestIssuesCommitScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "checks {": goldenPath("commit_scope_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := issuesCmd.NewCmdIssuesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--commit", "abc123f", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("commit_scope_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestIssuesPRScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "issueOccurrences(first: $limit)": goldenPath("pr_scope_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := issuesCmd.NewCmdIssuesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--pr", "42", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("pr_scope_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestIssuesFilterBySeverity(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "issues(first: $limit)": goldenPath("filtered_severity_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := issuesCmd.NewCmdIssuesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--severity", "critical", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("filtered_severity_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} diff --git a/command/metrics/metrics.go b/command/metrics/metrics.go new file mode 100644 index 00000000..3ae97711 --- /dev/null +++ b/command/metrics/metrics.go 
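Both the golden-file tests above and the new metrics command below receive their collaborators through cmddeps.Deps, whose definition is not part of this diff. A minimal sketch of that container, assuming only what the call sites show (Client, ConfigMgr, Stdout and their apparent types); the real file may differ:

package cmddeps

import (
	"io"

	"github.com/deepsourcelabs/cli/config"
	"github.com/deepsourcelabs/cli/deepsource"
)

// Deps bundles the dependencies injected into command constructors such as
// NewCmdIssuesWithDeps and NewCmdMetricsWithDeps. Field names and types are
// inferred from the call sites in this diff and may not match the actual file.
type Deps struct {
	Client    *deepsource.Client // pre-built API client (a mock in tests)
	ConfigMgr *config.Manager    // CLI configuration source
	Stdout    io.Writer          // destination for command output
}

When any field is nil, the commands fall back to the real implementations (config.DefaultManager, deepsource.New, os.Stdout), as MetricsOptions.Run and stdout() in the new metrics.go below illustrate.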
@@ -0,0 +1,515 @@ +package metrics + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "strings" + + "github.com/MakeNowJust/heredoc" + "github.com/deepsourcelabs/cli/command/cmddeps" + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/deepsource/metrics" + "github.com/deepsourcelabs/cli/internal/cli/completion" + "github.com/deepsourcelabs/cli/internal/cli/style" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + "github.com/deepsourcelabs/cli/internal/vcs" + "github.com/pterm/pterm" + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" +) + +type MetricsOptions struct { + RepoArg string + CommitOid string + PRNumber int + OutputFormat string + OutputFile string + Verbose bool + LimitArg int + repoSlug string + repoMetrics []metrics.RepositoryMetric + runMetrics *metrics.RunMetrics + prMetrics *metrics.PRMetrics + deps *cmddeps.Deps +} + +func (opts *MetricsOptions) stdout() io.Writer { + if opts.deps != nil && opts.deps.Stdout != nil { + return opts.deps.Stdout + } + return os.Stdout +} + +func NewCmdMetrics() *cobra.Command { + return NewCmdMetricsWithDeps(nil) +} + +func NewCmdMetricsWithDeps(deps *cmddeps.Deps) *cobra.Command { + opts := MetricsOptions{ + OutputFormat: "pretty", + LimitArg: 30, + deps: deps, + } + + doc := heredoc.Docf(` + View code metrics for a repository. + + By default, shows metrics from the default branch. Use %[1]s or %[2]s + to scope to a specific analysis run or pull request. + + Examples: + %[3]s + %[4]s + %[5]s + %[6]s + `, + style.Yellow("--commit"), + style.Yellow("--pr"), + style.Cyan("deepsource metrics"), + style.Cyan("deepsource metrics --repo owner/repo"), + style.Cyan("deepsource metrics --commit abc123f"), + style.Cyan("deepsource metrics --pr 123"), + ) + + cmd := &cobra.Command{ + Use: "metrics [flags]", + Short: "View repository metrics", + Long: doc, + RunE: func(cmd *cobra.Command, args []string) error { + return opts.Run(cmd.Context()) + }, + } + + // --repo, -r flag + cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Repository (owner/name)") + + // Scoping flags + cmd.Flags().StringVar(&opts.CommitOid, "commit", "", "Scope to a specific analysis run by commit OID") + cmd.Flags().IntVar(&opts.PRNumber, "pr", 0, "Scope to a specific pull request by number") + + // --output flag + cmd.Flags().StringVarP(&opts.OutputFormat, "output", "o", "pretty", "Output format: pretty, table, json, yaml") + + // --output-file flag + cmd.Flags().StringVar(&opts.OutputFile, "output-file", "", "Write output to a file instead of stdout") + + // --verbose, -v flag + cmd.Flags().BoolVarP(&opts.Verbose, "verbose", "v", false, "Show shortcodes and descriptions") + + // --limit, -l flag + cmd.Flags().IntVarP(&opts.LimitArg, "limit", "l", 30, "Maximum number of metrics to fetch") + + // Completions + _ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "pretty\tPretty-printed grouped output", + "table\tTabular output", + "json\tJSON output", + "yaml\tYAML output", + }, cobra.ShellCompDirectiveNoFileComp + }) + + // Mutual exclusivity + cmd.MarkFlagsMutuallyExclusive("commit", "pr") + + return cmd +} + +func (opts *MetricsOptions) Run(ctx 
context.Context) error { + // Load configuration + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + cfg, err := cfgMgr.Load() + if err != nil { + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) + } + if err := cfg.VerifyAuthentication(); err != nil { + return err + } + + // Resolve remote repository + remote, err := vcs.ResolveRemote(opts.RepoArg) + if err != nil { + return err + } + opts.repoSlug = remote.Owner + "/" + remote.RepoName + + // Create DeepSource client + var client *deepsource.Client + if opts.deps != nil && opts.deps.Client != nil { + client = opts.deps.Client + } else { + client, err = deepsource.New(deepsource.ClientOpts{ + Token: cfg.Token, + HostName: cfg.Host, + OnTokenRefreshed: cfgMgr.TokenRefreshCallback(), + }) + if err != nil { + return err + } + } + + // Fetch metrics based on scope + switch { + case opts.CommitOid != "": + opts.runMetrics, err = client.GetRunMetrics(ctx, opts.CommitOid) + case opts.PRNumber > 0: + opts.prMetrics, err = client.GetPRMetrics(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.PRNumber) + default: + opts.repoMetrics, err = client.GetRepoMetrics(ctx, remote.Owner, remote.RepoName, remote.VCSProvider) + } + if err != nil { + return err + } + + // Apply client-side limit + if opts.LimitArg > 0 { + if metricsList := opts.getMetrics(); len(metricsList) > opts.LimitArg { + truncated := metricsList[:opts.LimitArg] + switch { + case opts.runMetrics != nil: + opts.runMetrics.Metrics = truncated + case opts.prMetrics != nil: + opts.prMetrics.Metrics = truncated + default: + opts.repoMetrics = truncated + } + } + } + + // Output based on format + switch opts.OutputFormat { + case "json": + return opts.outputJSON() + case "yaml": + return opts.outputYAML() + case "table": + return opts.outputTable() + default: + return opts.outputHuman() + } +} + +func (opts *MetricsOptions) getMetrics() []metrics.RepositoryMetric { + switch { + case opts.runMetrics != nil: + return opts.runMetrics.Metrics + case opts.prMetrics != nil: + return opts.prMetrics.Metrics + default: + return opts.repoMetrics + } +} + +func (opts *MetricsOptions) outputTable() error { + metricsList := opts.getMetrics() + + if len(metricsList) == 0 { + pterm.Info.Println("No metrics found.") + return nil + } + + // Show context header for run/PR scopes + if opts.runMetrics != nil { + commitShort := opts.runMetrics.CommitOid + if len(commitShort) > 8 { + commitShort = commitShort[:8] + } + pterm.DefaultBox.WithTitle("Run Metrics").WithTitleTopCenter().Println( + fmt.Sprintf("%s %s\n%s %s", + pterm.Bold.Sprint("Commit:"), + commitShort, + pterm.Bold.Sprint("Branch:"), + opts.runMetrics.BranchName, + ), + ) + pterm.Println() + } else if opts.prMetrics != nil { + pterm.DefaultBox.WithTitle("Pull Request Metrics").WithTitleTopCenter().Println( + fmt.Sprintf("%s #%d\n%s %s", + pterm.Bold.Sprint("PR:"), + opts.prMetrics.Number, + pterm.Bold.Sprint("Branch:"), + opts.prMetrics.Branch, + ), + ) + pterm.Println() + } + + // Build metrics table + header := []string{"Metric", "Key", "Value", "Threshold", "Status"} + data := [][]string{header} + + for _, m := range metricsList { + for _, item := range m.Items { + threshold := "-" + if item.Threshold != nil { + threshold = fmt.Sprintf("%d%s", *item.Threshold, m.Unit) + } + + status := "-" + if item.ThresholdStatus != "" { + status = formatStatus(item.ThresholdStatus) + } + + 
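+			// Prefer the server-supplied display string; fall back to formatting the
+			// raw value with the metric's unit, and finally to "-" when neither is set.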
value := item.LatestValueDisplay + if value == "" && item.LatestValue != nil { + value = fmt.Sprintf("%.1f%s", *item.LatestValue, m.Unit) + } + if value == "" { + value = "-" + } + + data = append(data, []string{ + m.Name, + item.Key, + value, + threshold, + status, + }) + } + } + + pterm.DefaultTable.WithHasHeader().WithData(data).Render() + + // Show changeset stats for run scope + if opts.runMetrics != nil && opts.runMetrics.ChangesetStats != nil { + pterm.Println() + opts.outputChangesetStats() + } + + return nil +} + +func (opts *MetricsOptions) outputChangesetStats() { + stats := opts.runMetrics.ChangesetStats + + pterm.DefaultSection.Println("Changeset Coverage") + + header := []string{"Type", "Overall", "Covered", "New", "New Covered"} + data := [][]string{header} + + // Lines + data = append(data, []string{ + "Lines", + formatIntPtr(stats.Lines.Overall), + formatIntPtr(stats.Lines.OverallCovered), + formatIntPtr(stats.Lines.New), + formatIntPtr(stats.Lines.NewCovered), + }) + + // Branches + data = append(data, []string{ + "Branches", + formatIntPtr(stats.Branches.Overall), + formatIntPtr(stats.Branches.OverallCovered), + formatIntPtr(stats.Branches.New), + formatIntPtr(stats.Branches.NewCovered), + }) + + // Conditions + data = append(data, []string{ + "Conditions", + formatIntPtr(stats.Conditions.Overall), + formatIntPtr(stats.Conditions.OverallCovered), + formatIntPtr(stats.Conditions.New), + formatIntPtr(stats.Conditions.NewCovered), + }) + + pterm.DefaultTable.WithHasHeader().WithData(data).Render() +} + +func (opts *MetricsOptions) outputHuman() error { + metricsList := opts.getMetrics() + + if len(metricsList) == 0 { + pterm.Info.Println("No metrics found.") + return nil + } + + totalItems := 0 + for _, m := range metricsList { + // Metric name header (bold) + header := pterm.Bold.Sprint(m.Name) + if opts.Verbose && m.Shortcode != "" { + header += fmt.Sprintf(" (%s)", m.Shortcode) + } + fmt.Println(header) + + if opts.Verbose && m.Description != "" { + fmt.Printf(" %s\n", m.Description) + } + + for _, item := range m.Items { + value := formatValueDisplay(item, m.Unit) + colored := colorByStatus(value, item.ThresholdStatus) + threshold := formatThresholdDisplay(item, m.Unit) + + fmt.Printf(" %s: %s%s\n", item.Key, colored, threshold) + totalItems++ + } + + fmt.Println() + } + + // Changeset stats for run scope + if opts.runMetrics != nil && opts.runMetrics.ChangesetStats != nil { + opts.outputHumanChangesetStats() + fmt.Println() + } + + opts.printFooter(totalItems) + return nil +} + +func (opts *MetricsOptions) outputHumanChangesetStats() { + stats := opts.runMetrics.ChangesetStats + + fmt.Println(pterm.Bold.Sprint("Changeset Coverage")) + printChangesetLine("Lines", stats.Lines) + printChangesetLine("Branches", stats.Branches) + printChangesetLine("Conditions", stats.Conditions) +} + +func formatValueDisplay(item metrics.RepositoryMetricItem, unit string) string { + if item.LatestValueDisplay != "" { + return item.LatestValueDisplay + } + if item.LatestValue != nil { + return fmt.Sprintf("%.1f%s", *item.LatestValue, unit) + } + return "-" +} + +func formatThresholdDisplay(item metrics.RepositoryMetricItem, unit string) string { + if item.Threshold == nil { + return "" + } + return fmt.Sprintf(" (threshold: %d%s)", *item.Threshold, unit) +} + +func colorByStatus(text string, status string) string { + switch strings.ToUpper(status) { + case "PASSING": + return pterm.Green(text) + case "FAILING": + return pterm.Red(text) + default: + return text + } +} + +func 
printChangesetLine(label string, counts metrics.ChangesetStatsCounts) { + overall := intPtrVal(counts.Overall) + overallCovered := intPtrVal(counts.OverallCovered) + new := intPtrVal(counts.New) + newCovered := intPtrVal(counts.NewCovered) + fmt.Printf(" %s: %d covered of %d overall, %d covered of %d new\n", + label, overallCovered, overall, newCovered, new) +} + +func intPtrVal(v *int) int { + if v == nil { + return 0 + } + return *v +} + +func (opts *MetricsOptions) printFooter(count int) { + fmt.Printf("Showing %d metric(s) in %s", count, opts.repoSlug) + switch { + case opts.runMetrics != nil: + commitShort := opts.runMetrics.CommitOid + if len(commitShort) > 7 { + commitShort = commitShort[:7] + } + fmt.Printf(" from commit %s on %s\n", commitShort, opts.runMetrics.BranchName) + case opts.prMetrics != nil: + fmt.Printf(" from PR #%d (%s -> %s)\n", opts.prMetrics.Number, opts.prMetrics.Branch, opts.prMetrics.BaseBranch) + default: + fmt.Println(" from default branch") + } +} + +func formatStatus(status string) string { + switch strings.ToUpper(status) { + case "PASSING": + return pterm.Green("Passing") + case "FAILING": + return pterm.Red("Failing") + default: + return status + } +} + +func formatIntPtr(val *int) string { + if val == nil { + return "-" + } + return fmt.Sprintf("%d", *val) +} + +func (opts *MetricsOptions) outputJSON() error { + var data []byte + var err error + + switch { + case opts.runMetrics != nil: + data, err = json.MarshalIndent(opts.runMetrics, "", " ") + case opts.prMetrics != nil: + data, err = json.MarshalIndent(opts.prMetrics, "", " ") + default: + data, err = json.MarshalIndent(opts.repoMetrics, "", " ") + } + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format JSON output", err) + } + return opts.writeOutput(data, true) +} + +func (opts *MetricsOptions) outputYAML() error { + var data []byte + var err error + + switch { + case opts.runMetrics != nil: + data, err = yaml.Marshal(opts.runMetrics) + case opts.prMetrics != nil: + data, err = yaml.Marshal(opts.prMetrics) + default: + data, err = yaml.Marshal(opts.repoMetrics) + } + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format YAML output", err) + } + return opts.writeOutput(data, false) +} + +func (opts *MetricsOptions) writeOutput(data []byte, trailingNewline bool) error { + if opts.OutputFile == "" { + w := opts.stdout() + if trailingNewline { + fmt.Fprintln(w, string(data)) + } else { + fmt.Fprint(w, string(data)) + } + return nil + } + + if err := os.WriteFile(opts.OutputFile, data, 0644); err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to write output file", err) + } + pterm.Printf("Saved metrics to %s!\n", opts.OutputFile) + return nil +} diff --git a/command/metrics/tests/golden_files/pr_metrics_output.json b/command/metrics/tests/golden_files/pr_metrics_output.json new file mode 100644 index 00000000..f88d6ef9 --- /dev/null +++ b/command/metrics/tests/golden_files/pr_metrics_output.json @@ -0,0 +1,26 @@ +{ + "number": 42, + "title": "Add unit tests for metrics", + "base_branch": "main", + "branch": "feature/metrics-tests", + "metrics": [ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positive_direction": "UP", + "unit": "%", + "is_reported": true, + "is_threshold_enforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latest_value": 90.1, + "latest_value_display": "90.1%", + "threshold_status": "PASSING" + } + ] + 
} + ] +} diff --git a/command/metrics/tests/golden_files/pr_metrics_response.json b/command/metrics/tests/golden_files/pr_metrics_response.json new file mode 100644 index 00000000..f26a1a5d --- /dev/null +++ b/command/metrics/tests/golden_files/pr_metrics_response.json @@ -0,0 +1,30 @@ +{ + "repository": { + "pullRequest": { + "number": 42, + "title": "Add unit tests for metrics", + "baseBranch": "main", + "branch": "feature/metrics-tests", + "metrics": [ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positiveDirection": "UP", + "unit": "%", + "isReported": true, + "isThresholdEnforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latestValue": 90.1, + "latestValueDisplay": "90.1%", + "thresholdStatus": "PASSING" + } + ] + } + ] + } + } +} diff --git a/command/metrics/tests/golden_files/repo_metrics_output.json b/command/metrics/tests/golden_files/repo_metrics_output.json new file mode 100644 index 00000000..d4a5b34d --- /dev/null +++ b/command/metrics/tests/golden_files/repo_metrics_output.json @@ -0,0 +1,38 @@ +[ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positive_direction": "UP", + "unit": "%", + "is_reported": true, + "is_threshold_enforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latest_value": 85.5, + "latest_value_display": "85.5%", + "threshold_status": "PASSING" + } + ] + }, + { + "name": "Documentation Coverage", + "shortcode": "DOC", + "description": "Percentage of documented public symbols", + "positive_direction": "UP", + "unit": "%", + "is_reported": true, + "is_threshold_enforced": false, + "items": [ + { + "key": "aggregate", + "threshold": 60, + "latest_value": 42.3, + "latest_value_display": "42.3%", + "threshold_status": "FAILING" + } + ] + } +] diff --git a/command/metrics/tests/golden_files/repo_metrics_response.json b/command/metrics/tests/golden_files/repo_metrics_response.json new file mode 100644 index 00000000..7e9dcd3f --- /dev/null +++ b/command/metrics/tests/golden_files/repo_metrics_response.json @@ -0,0 +1,42 @@ +{ + "repository": { + "metrics": [ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positiveDirection": "UP", + "unit": "%", + "isReported": true, + "isThresholdEnforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latestValue": 85.5, + "latestValueDisplay": "85.5%", + "thresholdStatus": "PASSING" + } + ] + }, + { + "name": "Documentation Coverage", + "shortcode": "DOC", + "description": "Percentage of documented public symbols", + "positiveDirection": "UP", + "unit": "%", + "isReported": true, + "isThresholdEnforced": false, + "items": [ + { + "key": "aggregate", + "threshold": 60, + "latestValue": 42.3, + "latestValueDisplay": "42.3%", + "thresholdStatus": "FAILING" + } + ] + } + ] + } +} diff --git a/command/metrics/tests/golden_files/run_metrics_output.json b/command/metrics/tests/golden_files/run_metrics_output.json new file mode 100644 index 00000000..96a729c1 --- /dev/null +++ b/command/metrics/tests/golden_files/run_metrics_output.json @@ -0,0 +1,25 @@ +{ + "commit_oid": "abc123f4567890abcdef1234567890abcdef1234", + "branch_name": "main", + "status": "SUCCESS", + "metrics": [ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positive_direction": "UP", + "unit": "%", + "is_reported": true, + 
"is_threshold_enforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latest_value": 78.2, + "latest_value_display": "78.2%", + "threshold_status": "FAILING" + } + ] + } + ] +} diff --git a/command/metrics/tests/golden_files/run_metrics_response.json b/command/metrics/tests/golden_files/run_metrics_response.json new file mode 100644 index 00000000..ed72ff45 --- /dev/null +++ b/command/metrics/tests/golden_files/run_metrics_response.json @@ -0,0 +1,36 @@ +{ + "run": { + "commitOid": "abc123f4567890abcdef1234567890abcdef1234", + "branchName": "main", + "status": "SUCCESS", + "changesetStats": null, + "checks": { + "edges": [ + { + "node": { + "metrics": [ + { + "name": "Line Coverage", + "shortcode": "LCV", + "description": "Percentage of lines covered by tests", + "positiveDirection": "UP", + "unit": "%", + "isReported": true, + "isThresholdEnforced": true, + "items": [ + { + "key": "aggregate", + "threshold": 80, + "latestValue": 78.2, + "latestValueDisplay": "78.2%", + "thresholdStatus": "FAILING" + } + ] + } + ] + } + } + ] + } + } +} diff --git a/command/metrics/tests/metrics_test.go b/command/metrics/tests/metrics_test.go new file mode 100644 index 00000000..180c6c06 --- /dev/null +++ b/command/metrics/tests/metrics_test.go @@ -0,0 +1,106 @@ +package tests + +import ( + "bytes" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/command/cmddeps" + metricsCmd "github.com/deepsourcelabs/cli/command/metrics" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/internal/testutil" +) + +func goldenPath(name string) string { + _, callerFile, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(callerFile), "golden_files", name) +} + +func TestMetricsDefaultBranch(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "GetRepoMetrics": goldenPath("repo_metrics_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := metricsCmd.NewCmdMetricsWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("repo_metrics_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestMetricsCommitScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "changesetStats {": goldenPath("run_metrics_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := metricsCmd.NewCmdMetricsWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--commit", "abc123f", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("run_metrics_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", 
expected, got) + } +} + +func TestMetricsPRScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "GetPRMetrics": goldenPath("pr_metrics_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := metricsCmd.NewCmdMetricsWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--pr", "42", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("pr_metrics_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} diff --git a/command/repo/repo.go b/command/repo/repo.go deleted file mode 100644 index 52223fc5..00000000 --- a/command/repo/repo.go +++ /dev/null @@ -1,22 +0,0 @@ -package repo - -import ( - "github.com/spf13/cobra" - - "github.com/deepsourcelabs/cli/command/repo/status" - "github.com/deepsourcelabs/cli/command/repo/view" -) - -// Options holds the metadata. -type Options struct{} - -// NewCmdVersion returns the current version of cli being used -func NewCmdRepo() *cobra.Command { - cmd := &cobra.Command{ - Use: "repo", - Short: "Operations related to the project repository", - } - cmd.AddCommand(status.NewCmdRepoStatus()) - cmd.AddCommand(view.NewCmdRepoView()) - return cmd -} diff --git a/command/repo/status/status.go b/command/repo/status/status.go deleted file mode 100644 index 42dea01b..00000000 --- a/command/repo/status/status.go +++ /dev/null @@ -1,91 +0,0 @@ -package status - -import ( - "context" - "fmt" - - "github.com/MakeNowJust/heredoc" - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/utils" - "github.com/pterm/pterm" - "github.com/spf13/cobra" -) - -type RepoStatusOptions struct { - RepoArg string - TokenExpired bool - SelectedRemote *utils.RemoteData -} - -// NewCmdRepoStatus handles querying the activation status of the repo supplied as an arg -func NewCmdRepoStatus() *cobra.Command { - opts := RepoStatusOptions{ - RepoArg: "", - TokenExpired: config.Cfg.IsExpired(), - } - - doc := heredoc.Docf(` - View the activation status for the repository. 
- - To check if the current repository is activated on DeepSource, run: - %[1]s - - To check if a specific repository is activated on DeepSource, use the %[2]s flag: - %[3]s - `, utils.Cyan("deepsource repo status"), utils.Yellow("--repo"), utils.Cyan("deepsource repo status --repo repo_name")) - - cmd := &cobra.Command{ - Use: "status", - Short: "View the activation status for the repository.", - Long: doc, - Args: utils.NoArgs, - RunE: func(cmd *cobra.Command, args []string) error { - return opts.Run() - }, - } - - // --repo, -r flag - cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Get the activation status of the specified repository") - return cmd -} - -func (opts *RepoStatusOptions) Run() (err error) { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - err = cfg.VerifyAuthentication() - if err != nil { - return err - } - - // Get the remote repository URL for which issues have to - // be listed - opts.SelectedRemote, err = utils.ResolveRemote(opts.RepoArg) - if err != nil { - return err - } - // Use the SDK to find the activation status - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - ctx := context.Background() - statusResponse, err := deepsource.GetRepoStatus(ctx, opts.SelectedRemote.Owner, opts.SelectedRemote.RepoName, opts.SelectedRemote.VCSProvider) - if err != nil { - return err - } - - // Check response and show corresponding output - if statusResponse.Activated { - pterm.Info.Println("Analysis active on DeepSource (deepsource.io)") - } else { - pterm.Info.Println("DeepSource analysis is currently not activated on this repository.") - } - return nil -} diff --git a/command/repo/view/view.go b/command/repo/view/view.go deleted file mode 100644 index b912da56..00000000 --- a/command/repo/view/view.go +++ /dev/null @@ -1,106 +0,0 @@ -package view - -import ( - "context" - "errors" - "fmt" - "strings" - - "github.com/MakeNowJust/heredoc" - "github.com/cli/browser" - "github.com/deepsourcelabs/cli/config" - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/utils" - "github.com/spf13/cobra" -) - -var VCSMap = map[string]string{ - "GITHUB": "gh", - "GITHUB_ENTERPRISE": "ghe", - "GITLAB": "gl", - "BITBUCKET": "bb", - "BITBUCKET_DATACENTER": "bbdc", - "ADS": "ads", -} - -type RepoViewOptions struct { - RepoArg string - TokenExpired bool - SelectedRemote *utils.RemoteData -} - -func NewCmdRepoView() *cobra.Command { - opts := RepoViewOptions{ - RepoArg: "", - SelectedRemote: &utils.RemoteData{}, - } - - doc := heredoc.Docf(` - Open the DeepSource dashboard of a repository. - - Run %[1]s to open the DeepSource dashboard inside the browser. 
- `, utils.Cyan("deepsource repo view")) - - cmd := &cobra.Command{ - Use: "view", - Short: "Open the DeepSource dashboard of a repository", - Long: doc, - Args: utils.NoArgs, - RunE: func(cmd *cobra.Command, args []string) error { - return opts.Run() - }, - } - - // --repo, -r flag - cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Open the DeepSource dashboard of the specified repository") - return cmd -} - -func (opts *RepoViewOptions) Run() (err error) { - // Fetch config - cfg, err := config.GetConfig() - if err != nil { - return fmt.Errorf("Error while reading DeepSource CLI config : %v", err) - } - err = cfg.VerifyAuthentication() - if err != nil { - return err - } - - // Get the remote repository URL for which issues have to - // be listed - opts.SelectedRemote, err = utils.ResolveRemote(opts.RepoArg) - if err != nil { - return err - } - - // Making the "isActivated" (repo status) query again just to confirm if the user has access to that repo - deepsource, err := deepsource.New(deepsource.ClientOpts{ - Token: config.Cfg.Token, - HostName: config.Cfg.Host, - }) - if err != nil { - return err - } - ctx := context.Background() - _, err = deepsource.GetRepoStatus(ctx, opts.SelectedRemote.Owner, opts.SelectedRemote.RepoName, opts.SelectedRemote.VCSProvider) - if err != nil { - if strings.Contains(err.Error(), "Signature has expired") { - return errors.New("The token has expired. Please refresh the token using the command `deepsource auth refresh`") - } - - if strings.Contains(err.Error(), "Repository matching query does not exist") { - return errors.New("Unauthorized access. Please login if you haven't using the command `deepsource auth login`") - } - } - - // If the user has access to repo, frame the full URL of the repo and open it on the - // default browser - VCSShortcode := VCSMap[opts.SelectedRemote.VCSProvider] - - // Framing the complete URL - dashboardURL := fmt.Sprintf("https://%s/%s/%s/%s/", config.Cfg.Host, VCSShortcode, opts.SelectedRemote.Owner, opts.SelectedRemote.RepoName) - fmt.Printf("Press Enter to open %s in your browser...", dashboardURL) - fmt.Scanln() - return browser.OpenURL(dashboardURL) -} diff --git a/command/report/constants.go b/command/report/constants.go index 35afd020..a518aab5 100644 --- a/command/report/constants.go +++ b/command/report/constants.go @@ -10,7 +10,7 @@ Available commands are: Help: Use 'deepsource --help' for more information about the command. Documentation: - https://deepsource.io/docs/cli + https://deepsource.com/docs/cli ` reportUsageMessage = ` @@ -30,7 +30,7 @@ Notes: - Pass either '--value' or '--value-file'. If both are passed, contents of '--value' will be considered. - '--analyzer-type' is optional. If not passed, it will default to 'core'. Documentation: - https://deepsource.io/docs/cli#report + https://deepsource.com/docs/cli#report ` reportGraphqlQuery = "mutation($input: CreateArtifactInput!) {\r\n createArtifact(input: $input) {\r\n ok\r\n message\r\n error\r\n }\r\n}" reportGraphqlQueryOld = "mutation($input: CreateArtifactInput!) 
{\r\n createArtifact(input: $input) {\r\n ok\r\n error\r\n }\r\n}" diff --git a/command/report/report.go b/command/report/report.go index 364c9dbc..5d811dd1 100644 --- a/command/report/report.go +++ b/command/report/report.go @@ -1,20 +1,19 @@ package report import ( - "encoding/base64" + "context" "encoding/json" - "errors" "fmt" - "log" "os" - "strings" - "time" - "github.com/DataDog/zstd" "github.com/MakeNowJust/heredoc" - "github.com/deepsourcelabs/cli/utils" - "github.com/getsentry/sentry-go" + "github.com/deepsourcelabs/cli/internal/cli/args" + "github.com/deepsourcelabs/cli/internal/cli/style" + "github.com/deepsourcelabs/cli/internal/container" + "github.com/deepsourcelabs/cli/internal/interfaces" + reportsvc "github.com/deepsourcelabs/cli/internal/services/report" "github.com/spf13/cobra" + "gopkg.in/yaml.v3" ) type ReportOptions struct { @@ -30,10 +29,17 @@ type ReportOptions struct { OIDCRequestUrl string // url to manually get an OIDC token DeepSourceHostEndpoint string // DeepSource host endpoint where the app is running. Defaults to the cloud endpoint https://app.deepsource.com OIDCProvider string // OIDC provider to use for authentication + Output string // Output format: table, json, yaml } -// NewCmdVersion returns the current version of cli being used +// NewCmdReport returns the command to report artifacts to DeepSource func NewCmdReport() *cobra.Command { + return NewCmdReportWithDeps(nil) +} + +// NewCmdReportWithDeps builds the report command with injected dependencies. +// When deps is nil, it will be created at execution time to respect flags/env. +func NewCmdReportWithDeps(deps *container.Container) *cobra.Command { opts := ReportOptions{} doc := heredoc.Docf(` @@ -47,17 +53,28 @@ func NewCmdReport() *cobra.Command { You can flag combinations as well: %[5]s - `, utils.Yellow("--analyzer"), utils.Cyan("deepsource report --analyzer python"), utils.Yellow("--value"), utils.Cyan("deepsource report --key value"), utils.Cyan("deepsource report --analyzer go --value-file coverage.out")) + `, style.Yellow("--analyzer"), style.Cyan("deepsource report --analyzer python"), style.Yellow("--value"), style.Cyan("deepsource report --key value"), style.Cyan("deepsource report --analyzer go --value-file coverage.out")) cmd := &cobra.Command{ Use: "report", Short: "Report artifacts to DeepSource", Long: doc, - Args: utils.NoArgs, - Run: func(cmd *cobra.Command, args []string) { - returnCode := opts.Run() - sentry.Flush(2 * time.Second) - defer os.Exit(returnCode) + Args: args.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + if deps == nil { + deps = container.New() + } + svc := reportsvc.NewService(reportsvc.ServiceDeps{ + GitClient: deps.GitClient, + HTTPClient: deps.HTTPClient, + FileSystem: deps.FileSystem, + Environment: deps.Environment, + Sentry: deps.Sentry, + Output: deps.Output, + Workdir: os.Getwd, + }) + + return opts.Run(cmd.Context(), svc, deps.Output) }, } @@ -79,309 +96,105 @@ func NewCmdReport() *cobra.Command { cmd.Flags().StringVar(&opts.OIDCRequestUrl, "oidc-request-url", "", "OIDC provider's request URL to fetch an OIDC token") cmd.Flags().StringVar(&opts.DeepSourceHostEndpoint, "deepsource-host-endpoint", "https://app.deepsource.com", "DeepSource host endpoint where the app is running. Defaults to the cloud endpoint https://app.deepsource.com") cmd.Flags().StringVar(&opts.OIDCProvider, "oidc-provider", "", "OIDC provider to use for authentication. 
Supported providers: github-actions") + cmd.Flags().StringVar(&opts.Output, "output", "pretty", "Output format: pretty, table, json, yaml") // --skip-verify flag to skip SSL certificate verification while reporting test coverage data. cmd.Flags().BoolVar(&opts.SkipCertificateVerification, "skip-verify", false, "skip SSL certificate verification while sending the test coverage data") - return cmd -} + _ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "pretty\tPretty-printed output", + "table\tHuman-readable table", + "json\tJSON output", + "yaml\tYAML output", + }, cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("analyzer-type", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "community\tCommunity analyzer", + }, cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("oidc-provider", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "github-actions\tGitHub Actions OIDC", + }, cobra.ShellCompDirectiveNoFileComp + }) -func (opts *ReportOptions) sanitize() { - opts.Analyzer = strings.TrimSpace(opts.Analyzer) - opts.AnalyzerType = strings.TrimSpace(opts.AnalyzerType) - opts.Key = strings.TrimSpace(opts.Key) - opts.Value = strings.TrimSpace(opts.Value) - opts.ValueFile = strings.TrimSpace(opts.ValueFile) - opts.DSN = strings.TrimSpace(os.Getenv("DEEPSOURCE_DSN")) - opts.OIDCRequestToken = strings.TrimSpace(opts.OIDCRequestToken) - opts.OIDCRequestUrl = strings.TrimSpace(opts.OIDCRequestUrl) - opts.DeepSourceHostEndpoint = strings.TrimSpace(opts.DeepSourceHostEndpoint) + return cmd } -func (opts *ReportOptions) validateKey() error { - supportedKeys := map[string]bool{ - "python": true, - "go": true, - "javascript": true, - "ruby": true, - "java": true, - "scala": true, - "php": true, - "csharp": true, - "cxx": true, - "rust": true, - "swift": true, - "kotlin": true, +func (opts *ReportOptions) Run(ctx context.Context, svc *reportsvc.Service, output interfaces.OutputWriter) error { + result, err := svc.Report(ctx, reportsvc.Options{ + Analyzer: opts.Analyzer, + AnalyzerType: opts.AnalyzerType, + Key: opts.Key, + Value: opts.Value, + ValueFile: opts.ValueFile, + SkipCertificateVerification: opts.SkipCertificateVerification, + DSN: opts.DSN, + UseOIDC: opts.UseOIDC, + OIDCRequestToken: opts.OIDCRequestToken, + OIDCRequestUrl: opts.OIDCRequestUrl, + DeepSourceHostEndpoint: opts.DeepSourceHostEndpoint, + OIDCProvider: opts.OIDCProvider, + }) + if err != nil { + if output != nil { + output.Errorf("%v\n", err) + } else { + fmt.Fprintln(os.Stderr, err) + } + return err } - if opts.Analyzer == "test-coverage" && !supportedKeys[opts.Key] { - return fmt.Errorf("DeepSource | Error | Invalid Key: %s (Supported Keys: %v)", opts.Key, supportedKeys) + if err := printReportResult(output, opts.Output, result); err != nil { + if output != nil { + output.Errorf("%v\n", err) + } else { + fmt.Fprintln(os.Stderr, err) + } + return err } return nil } -func (opts *ReportOptions) Run() int { - opts.sanitize() - if opts.UseOIDC { - dsn, err := utils.GetDSNFromOIDC(opts.OIDCRequestToken, opts.OIDCRequestUrl, opts.DeepSourceHostEndpoint, opts.OIDCProvider) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Failed to get DSN using OIDC:", err) - return 1 +func printReportResult(output interfaces.OutputWriter, 
format string, result *reportsvc.Result) error { + write := func(format string, args ...interface{}) { + if output == nil { + fmt.Printf(format, args...) + return } - opts.DSN = dsn - } - - if opts.DSN == "" { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Environment variable DEEPSOURCE_DSN not set (or) is empty. You can find it under the repository settings page") - return 1 - } - sentry.ConfigureScope(func(scope *sentry.Scope) { - scope.SetUser(sentry.User{ID: opts.DSN}) - }) - - ///////////////////// - // Command: report // - ///////////////////// - - // Get current path - currentDir, err := os.Getwd() - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to identify current directory") - sentry.CaptureException(err) - return 1 - } - sentry.ConfigureScope(func(scope *sentry.Scope) { - scope.SetExtra("currentDir", currentDir) - }) - - // validate key - if err := opts.validateKey(); err != nil { - fmt.Fprintln(os.Stderr, err) - sentry.CaptureException(err) - return 1 - } - - dsn, err := NewDSN(opts.DSN) - if err != nil { - fmt.Fprintln(os.Stderr, err) - sentry.CaptureException(err) - return 1 - } - - /////////////////////// - // Generate metadata // - /////////////////////// - - // Head Commit OID - headCommitOID, warning, err := gitGetHead(currentDir) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to get commit OID HEAD. Make sure you are running the CLI from a git repository") - log.Println(err) - sentry.CaptureException(err) - return 1 - } - sentry.ConfigureScope(func(scope *sentry.Scope) { - scope.SetExtra("headCommitOID", headCommitOID) - }) - - // Flag validation - if opts.Value == "" && opts.ValueFile == "" { - fmt.Fprintln(os.Stderr, "DeepSource | Error | '--value' (or) '--value-file' not passed") - return 1 - } - - var analyzerShortcode string - var analyzerType string - var artifactKey string - var artifactValue string - - analyzerShortcode = opts.Analyzer - analyzerType = opts.AnalyzerType - artifactKey = opts.Key - - if opts.Value != "" { - artifactValue = opts.Value + output.Printf(format, args...) 
} - if opts.ValueFile != "" { - // Check file size - _, err := os.Stat(opts.ValueFile) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to read specified value file:", opts.ValueFile) - sentry.CaptureException(err) - return 1 - } - - valueBytes, err := os.ReadFile(opts.ValueFile) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to read specified value file:", opts.ValueFile) - sentry.CaptureException(err) - return 1 + switch format { + case "", "pretty", "table": + write("DeepSource | Artifact published successfully\n\n") + write("Analyzer %s\n", result.Analyzer) + write("Key %s\n", result.Key) + if result.Message != "" { + write("Message %s\n", result.Message) } - - artifactValue = string(valueBytes) - } - - // Query DeepSource API to check if compression is supported - q := ReportQuery{Query: graphqlCheckCompressed} - - qBytes, err := json.Marshal(q) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Failed to marshal query:", err) - sentry.CaptureException(err) - return 1 - } - - r, err := makeQuery( - dsn.Protocol+"://"+dsn.Host+"/graphql/cli/", - qBytes, - "application/json", - opts.SkipCertificateVerification, - ) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Failed to make query:", err) - sentry.CaptureException(err) - return 1 - } - - // res is a struct to unmarshal the response to check if compression is supported - var res struct { - Data struct { - Type struct { - InputFields []struct { - Name string `json:"name"` - } `json:"inputFields"` - } `json:"__type"` - } `json:"data"` - } - - err = json.Unmarshal(r, &res) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Failed to unmarshal response:", err) - sentry.CaptureException(err) - return 1 - } - - reportMeta := make(map[string]interface{}) - reportMeta["workDir"] = currentDir - - // Compress the value if compression is supported - for _, inputField := range res.Data.Type.InputFields { - if inputField.Name == "compressed" { - // Compress the byte array - var compressedBytes []byte - compressLevel := 20 - compressedBytes, err = zstd.CompressLevel(compressedBytes, []byte(artifactValue), compressLevel) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Failed to compress value file:", opts.ValueFile) - sentry.CaptureException(err) - return 1 - } - - // Base64 encode the compressed byte array - artifactValue = base64.StdEncoding.EncodeToString(compressedBytes) - - // Set the compression flag - reportMeta["compressed"] = "True" + if result.Warning != "" { + write("%s", result.Warning) } - } - - //////////////////// - // Generate query // - //////////////////// - - queryInput := ReportQueryInput{ - AccessToken: dsn.Token, - CommitOID: headCommitOID, - ReporterName: "cli", - ReporterVersion: CliVersion, - Key: artifactKey, - Data: artifactValue, - AnalyzerShortcode: analyzerShortcode, - // AnalyzerType: analyzerType, // Add this in the later steps, only is the analyzer type is passed. - // This makes sure that the cli is always backwards compatible. The API is designed to accept analyzer type only if it is passed. 
- Metadata: reportMeta, - } - - query := ReportQuery{Query: reportGraphqlQuery} - // Check if analyzerType is passed and add it to the queryInput - if analyzerType != "" { - queryInput.AnalyzerType = analyzerType - } - // Pass queryInput to the query - query.Variables.Input = queryInput - - // Marshal request body - queryBodyBytes, err := json.Marshal(query) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to marshal query body") - sentry.CaptureException(err) - return 1 - } - - queryResponseBody, err := makeQuery( - dsn.Protocol+"://"+dsn.Host+"/graphql/cli/", - queryBodyBytes, - "application/json", - opts.SkipCertificateVerification, - ) - if err != nil { - // Make Query without message field. - query := ReportQuery{Query: reportGraphqlQueryOld} - query.Variables.Input = queryInput - queryBodyBytes, err := json.Marshal(query) + return nil + case "json": + payload, err := json.MarshalIndent(result, "", " ") if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to marshal query body") - sentry.CaptureException(err) - return 1 + return fmt.Errorf("DeepSource | Error | Failed to format JSON output: %w", err) } - queryResponseBody, err = makeQuery( - dsn.Protocol+"://"+dsn.Host+"/graphql/cli/", - queryBodyBytes, - "application/json", - opts.SkipCertificateVerification, - ) + write("%s\n", payload) + return nil + case "yaml": + payload, err := yaml.Marshal(result) if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Reporting failed |", err) - sentry.CaptureException(err) - return 1 + return fmt.Errorf("DeepSource | Error | Failed to format YAML output: %w", err) } + write("%s", payload) + return nil + default: + return fmt.Errorf("DeepSource | Error | Unsupported output format: %s", format) } - // Parse query's response body - queryResponse := QueryResponse{} - err = json.Unmarshal(queryResponseBody, &queryResponse) - if err != nil { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Unable to parse response body") - sentry.CaptureException(err) - return 1 - } - - // Check for errors in response body - // Response format: - // { - // "data": { - // "createArtifact": { - // "ok": false, - // "error": "No repository found attached with the access token: dasdsds" - // } - // } - // } - - if !queryResponse.Data.CreateArtifact.Ok { - fmt.Fprintln(os.Stderr, "DeepSource | Error | Reporting failed |", queryResponse.Data.CreateArtifact.Error) - sentry.CaptureException(errors.New(queryResponse.Data.CreateArtifact.Error)) - return 1 - } - - fmt.Printf("DeepSource | Artifact published successfully\n\n") - fmt.Printf("Analyzer %s\n", analyzerShortcode) - fmt.Printf("Key %s\n", artifactKey) - if queryResponse.Data.CreateArtifact.Message != "" { - fmt.Printf("Message %s\n", queryResponse.Data.CreateArtifact.Message) - } - if warning != "" { - fmt.Print(warning) - } - return 0 } diff --git a/command/report/tests/golden_files/report_success.txt b/command/report/tests/golden_files/report_success.txt index a321ec5e..f86ecf88 100644 --- a/command/report/tests/golden_files/report_success.txt +++ b/command/report/tests/golden_files/report_success.txt @@ -1,3 +1,6 @@ +DeepSource | Info | Preparing artifact... +DeepSource | Info | Checking compression support... +DeepSource | Info | Uploading artifact... 
DeepSource | Artifact published successfully Analyzer test-coverage diff --git a/command/report/tests/init_test.go b/command/report/tests/init_test.go index 214fc91c..f5f9798c 100644 --- a/command/report/tests/init_test.go +++ b/command/report/tests/init_test.go @@ -2,32 +2,111 @@ package tests import ( "fmt" + "io" "log" + "net" "net/http" + "net/http/httptest" + "net/url" "os" + "path/filepath" "testing" ) -var srv *http.Server +var srv *httptest.Server +var coverageFilePath string +var repoRoot string func TestMain(m *testing.M) { log.SetFlags(log.LstdFlags | log.Lshortfile) - srv := graphQLMockAPIServer() + var err error + srv, err = graphQLMockAPIServer() + if err != nil { + log.Printf("skipping report workflow tests: %v", err) + os.Exit(0) + } + if srv != nil { + if parsed, err := parseDSNFromURL(srv.URL); err == nil { + dsn = parsed + } + } + if err := prepareArtifacts(); err != nil { + log.Printf("failed to prepare report artifacts: %v", err) + } code := m.Run() srv.Close() os.Exit(code) } -func graphQLMockAPIServer() *http.Server { - srv = &http.Server{Addr: ":8081"} +func graphQLMockAPIServer() (*httptest.Server, error) { + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + return nil, err + } + server := httptest.NewUnstartedServer(http.HandlerFunc(graphQLAPIMock)) + server.Listener = listener + server.Start() + return server, nil +} + +func parseDSNFromURL(raw string) (string, error) { + parsed, err := url.Parse(raw) + if err != nil { + return "", err + } + if parsed.Host == "" { + return "", fmt.Errorf("missing host in URL: %s", raw) + } + return fmt.Sprintf("%s://%s@%s", parsed.Scheme, "f59ab9314307", parsed.Host), nil +} + +func prepareArtifacts() error { + wd, err := os.Getwd() + if err != nil { + return err + } - http.HandleFunc("/", graphQLAPIMock) - go func() { - err := srv.ListenAndServe() - if err != nil && err != http.ErrServerClosed { - panic(fmt.Sprintf("failed to start HTTP mock server with error=%s", err)) + defaultRoot := filepath.Clean(filepath.Join(wd, "..", "..", "..")) + rootDir := defaultRoot + if envRoot := os.Getenv("CODE_PATH"); envRoot != "" { + coverageCandidate := filepath.Join(envRoot, "command", "report", "tests", "golden_files", "python_coverage.xml") + if _, err := os.Stat(coverageCandidate); err == nil { + rootDir = envRoot } - }() + } + repoRoot = rootDir + _ = os.Setenv("CODE_PATH", repoRoot) + + tempDir, err := os.MkdirTemp("", "deepsource-report") + if err != nil { + return err + } + + coverageSrc := filepath.Join(rootDir, "command", "report", "tests", "golden_files", "python_coverage.xml") + coverageDst := filepath.Join(tempDir, "python_coverage.xml") + if err := copyFile(coverageSrc, coverageDst); err != nil { + return err + } + coverageFilePath = coverageDst + + return nil +} + +func copyFile(src, dst string) error { + in, err := os.Open(src) + if err != nil { + return err + } + defer in.Close() + + out, err := os.Create(dst) + if err != nil { + return err + } + defer out.Close() - return srv + if _, err := io.Copy(out, in); err != nil { + return err + } + return out.Sync() } diff --git a/command/report/tests/report_workflow_test.go b/command/report/tests/report_workflow_test.go index da45a1e5..c0eb1cf3 100644 --- a/command/report/tests/report_workflow_test.go +++ b/command/report/tests/report_workflow_test.go @@ -8,12 +8,15 @@ import ( "log" "net/http" "os" - "os/exec" + "path/filepath" + "runtime" "strings" "testing" - "github.com/DataDog/zstd" + "github.com/klauspost/compress/zstd" 
"github.com/deepsourcelabs/cli/command/report" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/container" "github.com/google/go-cmp/cmp" ) @@ -24,12 +27,24 @@ import ( // Sample values to the run the analyzer on const ( - analyzer = "test-coverage" - commitOid = "b7ff1a5ecb0dce0541b935224f852ee98570bbd4" - dsn = "http://f59ab9314307@localhost:8081" - key = "python" + analyzer = "test-coverage" + key = "python" ) +var dsn = "http://f59ab9314307@localhost:8081" + +func testDir() string { + _, filename, _, ok := runtime.Caller(0) + if !ok { + return "." + } + return filepath.Dir(filename) +} + +func goldenFilePath(name string) string { + return filepath.Join(testDir(), "golden_files", name) +} + func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { // Read request request request body req, err := io.ReadAll(r.Body) @@ -44,7 +59,7 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) w.Header().Set("Content-Type", "application/json") - successResponseBodyData, err := os.ReadFile("./golden_files/report_grqphql_artifactmetadatainput_response_success.json") + successResponseBodyData, err := os.ReadFile(goldenFilePath("report_grqphql_artifactmetadatainput_response_success.json")) if err != nil { log.Println(err) return @@ -71,7 +86,13 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { } // Decompress zstd compressed data - decompressedData, err := zstd.Decompress(nil, decodedData) + decoder, err := zstd.NewReader(nil) + if err != nil { + log.Println(err) + return + } + defer decoder.Close() + decompressedData, err := decoder.DecodeAll(decodedData, nil) if err != nil { log.Println(err) return @@ -81,11 +102,11 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { reportQuery.Variables.Input.Data = string(decompressedData) // Read test graphql request body artifact file - requestBodyGoldenFilePath := "./golden_files/report_graphql_request_body.json" + requestBodyGoldenFilePath := goldenFilePath("report_graphql_request_body.json") if reportQuery.Variables.Input.AnalyzerType == "community" { // There's a separate goldenfile for request made with a type flag passed as community - requestBodyGoldenFilePath = "./golden_files/report_graphql_community_request_body.json" + requestBodyGoldenFilePath = goldenFilePath("report_graphql_community_request_body.json") } requestBodyData, err := os.ReadFile(requestBodyGoldenFilePath) @@ -112,14 +133,14 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { requestReportQuery.Variables.Input.ReporterVersion = report.CliVersion // Read test graphql success response body artifact file - successResponseBodyData, err := os.ReadFile("./golden_files/report_graphql_success_response_body.json") + successResponseBodyData, err := os.ReadFile(goldenFilePath("report_graphql_success_response_body.json")) if err != nil { log.Println(err) return } // Read test graphql error response body artifact file - errorResponseBodyData, err := os.ReadFile("./golden_files/report_graphql_error_response_body.json") + errorResponseBodyData, err := os.ReadFile(goldenFilePath("report_graphql_error_response_body.json")) if err != nil { log.Println(err) return @@ -134,9 +155,7 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { if want, got := requestReportQuery, reportQuery; cmp.Equal(want, got) { w.Write([]byte(successResponseBodyData)) } else { - if want != got { - log.Printf("Mismatch found:\nDiff: %s\n", cmp.Diff(want, got)) - } + log.Printf("Mismatch 
found:\nDiff: %s\n", cmp.Diff(want, got)) w.Write([]byte(errorResponseBodyData)) } } @@ -144,12 +163,12 @@ func graphQLAPIMock(w http.ResponseWriter, r *http.Request) { func TestReportKeyValueWorkflow(t *testing.T) { // Read test artifact file - data, err := os.ReadFile("/tmp/python_coverage.xml") + data, err := os.ReadFile(coverageFilePath) if err != nil { t.Error(err) } - cmd := exec.Command("/tmp/deepsource", + outStr, errStr, err := runReportCommand(t, []string{ "report", "--analyzer", analyzer, @@ -157,21 +176,7 @@ func TestReportKeyValueWorkflow(t *testing.T) { key, "--value", string(data), - ) - - // Set env variables - cmd.Env = os.Environ() - cmd.Env = append(cmd.Env, "DEEPSOURCE_DSN="+dsn) - cmd.Dir = os.Getenv("CODE_PATH") - - var stdout, stderr bytes.Buffer - - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err = cmd.Run() - - outStr, errStr := stdout.String(), stderr.String() + }) log.Printf("== Run deepsource CLI command ==\n%s\n%s\n", outStr, errStr) if err != nil { @@ -180,7 +185,7 @@ func TestReportKeyValueWorkflow(t *testing.T) { t.Errorf("Error executing deepsource CLI command: %v", err) } - output, err := os.ReadFile("./golden_files/report_success.txt") + output, err := os.ReadFile(goldenFilePath("report_success.txt")) if err != nil { t.Fatal(err) } @@ -191,28 +196,15 @@ func TestReportKeyValueWorkflow(t *testing.T) { } func TestReportKeyValueFileWorkflow(t *testing.T) { - cmd := exec.Command("/tmp/deepsource", + outStr, errStr, err := runReportCommand(t, []string{ "report", "--analyzer", analyzer, "--key", key, "--value-file", - "/tmp/python_coverage.xml", - ) - - // Set env variables - cmd.Env = os.Environ() - cmd.Env = append(cmd.Env, "DEEPSOURCE_DSN="+dsn) - cmd.Dir = os.Getenv("CODE_PATH") - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err := cmd.Run() - - outStr, errStr := stdout.String(), stderr.String() + coverageFilePath, + }) log.Printf("== Run deepsource CLI command ==\n%s\n%s\n", outStr, errStr) if err != nil { @@ -221,7 +213,7 @@ func TestReportKeyValueFileWorkflow(t *testing.T) { t.Errorf("Error executing deepsource CLI command: %v", err) } - output, err := os.ReadFile("./golden_files/report_success.txt") + output, err := os.ReadFile(goldenFilePath("report_success.txt")) if err != nil { t.Fatal(err) } @@ -232,7 +224,7 @@ func TestReportKeyValueFileWorkflow(t *testing.T) { } func TestReportAnalyzerTypeWorkflow(t *testing.T) { - cmd := exec.Command("/tmp/deepsource", + outStr, errStr, err := runReportCommand(t, []string{ "report", "--analyzer", analyzer, @@ -241,21 +233,8 @@ func TestReportAnalyzerTypeWorkflow(t *testing.T) { "--key", key, "--value-file", - "/tmp/python_coverage.xml", - ) - - // Set env variables - cmd.Env = os.Environ() - cmd.Env = append(cmd.Env, "DEEPSOURCE_DSN="+dsn) - cmd.Dir = os.Getenv("CODE_PATH") - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err := cmd.Run() - - outStr, errStr := stdout.String(), stderr.String() + coverageFilePath, + }) log.Printf("== Run deepsource CLI command ==\n%s\n%s\n", outStr, errStr) if err != nil { @@ -264,7 +243,7 @@ func TestReportAnalyzerTypeWorkflow(t *testing.T) { t.Errorf("Error executing deepsource CLI command: %v", err) } - output, err := os.ReadFile("./golden_files/report_success.txt") + output, err := os.ReadFile(goldenFilePath("report_success.txt")) if err != nil { t.Fatal(err) } @@ -273,3 +252,36 @@ func TestReportAnalyzerTypeWorkflow(t *testing.T) { t.Errorf("Expected: %s, Got: %s", want, outStr) } } + +func 
runReportCommand(t *testing.T, args []string) (string, string, error) {
+	t.Helper()
+
+	deps := container.NewTest()
+	if env, ok := deps.Environment.(*adapters.MockEnvironment); ok {
+		env.Set("DEEPSOURCE_DSN", dsn)
+	}
+	if git, ok := deps.GitClient.(*adapters.MockGitClient); ok {
+		git.SetHead("b9e678d8dcb43fa1340e8a0c579b2c642280dc27", "")
+	}
+	if srv != nil {
+		deps.HTTPClient = srv.Client()
+	}
+
+	origDir, err := os.Getwd()
+	if err == nil {
+		if repoRoot != "" {
+			if chdirErr := os.Chdir(repoRoot); chdirErr == nil {
+				defer os.Chdir(origDir)
+			}
+		}
+	}
+
+	cmd := report.NewCmdReportWithDeps(deps)
+	cmd.SetArgs(args[1:])
+
+	execErr := cmd.Execute()
+	if output, ok := deps.Output.(*adapters.BufferOutput); ok {
+		return output.String(), output.ErrorString(), execErr
+	}
+	return "", "", execErr
+}
diff --git a/command/repository/analyzers/analyzers.go b/command/repository/analyzers/analyzers.go
new file mode 100644
index 00000000..6aa93c96
--- /dev/null
+++ b/command/repository/analyzers/analyzers.go
@@ -0,0 +1,135 @@
+package analyzers
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+
+	"github.com/MakeNowJust/heredoc"
+	"github.com/deepsourcelabs/cli/command/cmddeps"
+	"github.com/deepsourcelabs/cli/config"
+	"github.com/deepsourcelabs/cli/deepsource/analyzers"
+	"github.com/deepsourcelabs/cli/internal/cli/args"
+	"github.com/deepsourcelabs/cli/internal/cli/completion"
+	"github.com/deepsourcelabs/cli/internal/cli/style"
+	reposvc "github.com/deepsourcelabs/cli/internal/services/repo"
+	"github.com/pterm/pterm"
+	"github.com/spf13/cobra"
+	"gopkg.in/yaml.v3"
+)
+
+type AnalyzersOptions struct {
+	RepoArg string
+	Output  string
+	deps    *cmddeps.Deps
+}
+
+func (opts *AnalyzersOptions) stdout() io.Writer {
+	if opts.deps != nil && opts.deps.Stdout != nil {
+		return opts.deps.Stdout
+	}
+	return os.Stdout
+}
+
+func NewCmdAnalyzers() *cobra.Command {
+	return NewCmdAnalyzersWithDeps(nil)
+}
+
+func NewCmdAnalyzersWithDeps(deps *cmddeps.Deps) *cobra.Command {
+	opts := AnalyzersOptions{
+		deps: deps,
+	}
+
+	doc := heredoc.Docf(`
+		List analyzers enabled on a repository.
+
+		Run %[1]s to list the analyzers enabled on the current repository.
+
+		To list analyzers for a specific repository, use the %[2]s flag:
+		%[3]s
+	`, style.Cyan("deepsource repository analyzers"), style.Yellow("--repo"), style.Cyan("deepsource repository analyzers --repo owner/repo"))
+
+	cmd := &cobra.Command{
+		Use:   "analyzers",
+		Short: "List analyzers enabled on the repository",
+		Long:  doc,
+		Args:  args.NoArgs,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			return opts.Run()
+		},
+	}
+
+	cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "List analyzers for a specific repository")
+	cmd.Flags().StringVar(&opts.Output, "output", "pretty", "Output format: pretty, table, json, yaml")
+	_ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp
+	})
+	_ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		return []string{
+			"pretty\tPretty-printed output",
+			"table\tHuman-readable table",
+			"json\tJSON output",
+			"yaml\tYAML output",
+		}, cobra.ShellCompDirectiveNoFileComp
+	})
+
+	return cmd
+}
+
+func (opts *AnalyzersOptions) Run() error {
+	ctx := context.Background()
+	var cfgMgr *config.Manager
+	if opts.deps != nil && opts.deps.ConfigMgr != nil {
+		cfgMgr = opts.deps.ConfigMgr
+	} else {
+		cfgMgr = config.DefaultManager()
+	}
+	var svc *reposvc.Service
+	if opts.deps != nil && opts.deps.RepoService != nil {
+		svc = opts.deps.RepoService
+	} else {
+		svc = reposvc.NewService(cfgMgr)
+	}
+	result, err := svc.EnabledAnalyzers(ctx, opts.RepoArg)
+	if err != nil {
+		return err
+	}
+
+	return opts.printAnalyzers(result)
+}
+
+func (opts *AnalyzersOptions) printAnalyzers(list []analyzers.Analyzer) error {
+	w := opts.stdout()
+	switch opts.Output {
+	case "", "pretty", "table":
+		if len(list) == 0 {
+			pterm.Println("No analyzers enabled on this repository.")
+			return nil
+		}
+		data := pterm.TableData{{"Name", "Shortcode"}}
+		for _, a := range list {
+			data = append(data, []string{a.Name, a.Shortcode})
+		}
+		pterm.DefaultTable.WithHasHeader().WithData(data).Render()
+		pterm.Printf("\n%d analyzer(s) enabled\n", len(list))
+		return nil
+	case "json":
+		payload, err := json.MarshalIndent(list, "", "  ")
+		if err != nil {
+			return fmt.Errorf("DeepSource | Error | Failed to format JSON output: %w", err)
+		}
+		fmt.Fprintf(w, "%s\n", payload)
+		return nil
+	case "yaml":
+		payload, err := yaml.Marshal(list)
+		if err != nil {
+			return fmt.Errorf("DeepSource | Error | Failed to format YAML output: %w", err)
+		}
+		fmt.Fprint(w, string(payload))
+		return nil
+	default:
+		return fmt.Errorf("DeepSource | Error | Unsupported output format: %s", opts.Output)
+	}
+}
diff --git a/command/repository/analyzers/tests/analyzers_test.go b/command/repository/analyzers/tests/analyzers_test.go
new file mode 100644
index 00000000..4cfc8b53
--- /dev/null
+++ b/command/repository/analyzers/tests/analyzers_test.go
@@ -0,0 +1,52 @@
+package tests
+
+import (
+	"bytes"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"testing"
+
+	"github.com/deepsourcelabs/cli/command/cmddeps"
+	analyzersCmd "github.com/deepsourcelabs/cli/command/repository/analyzers"
+	"github.com/deepsourcelabs/cli/deepsource"
+	reposvc "github.com/deepsourcelabs/cli/internal/services/repo"
+	"github.com/deepsourcelabs/cli/internal/testutil"
+)
+
+func goldenPath(name string) string {
+	_, callerFile, _, _ := runtime.Caller(0)
+	return filepath.Join(filepath.Dir(callerFile), "golden_files", name)
+}
+
+func TestRepoAnalyzersEnabled(t *testing.T) {
+	cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com")
+	mock := testutil.MockQueryFunc(t, map[string]string{
+		"enabledAnalyzers {": goldenPath("enabled_response.json"),
+	})
+	client := deepsource.NewWithGraphQLClient(mock)
+	svc := reposvc.NewTestService(cfgMgr, func(opts deepsource.ClientOpts) (reposvc.Client, error) {
+		return client, nil
+	})
+
+	var buf bytes.Buffer
+	deps := &cmddeps.Deps{
+		ConfigMgr:   cfgMgr,
+		Stdout:      &buf,
+		RepoService: svc,
+	}
+
+	cmd := analyzersCmd.NewCmdAnalyzersWithDeps(deps)
+	cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"})
+
+	if err := cmd.Execute(); err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+
+	expected := string(testutil.LoadGoldenFile(t, goldenPath("enabled_output.json")))
+	got := buf.String()
+
+	if strings.TrimSpace(got) != strings.TrimSpace(expected) {
+		t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got)
+	}
+}
diff --git a/command/repository/analyzers/tests/golden_files/enabled_output.json b/command/repository/analyzers/tests/golden_files/enabled_output.json
new file mode 100644
index 00000000..2c7c4e54
--- /dev/null
+++ b/command/repository/analyzers/tests/golden_files/enabled_output.json
@@ -0,0 +1,12 @@
+[
+  {
+    "Name": "Go",
+    "Shortcode": "go",
+    "MetaSchema": ""
+  },
+  {
+    "Name": "Python",
+    "Shortcode": "python",
+    "MetaSchema": ""
+  }
+]
diff --git a/command/repository/analyzers/tests/golden_files/enabled_response.json b/command/repository/analyzers/tests/golden_files/enabled_response.json
new file mode 100644
index 00000000..71cdfd2c
--- /dev/null
+++ b/command/repository/analyzers/tests/golden_files/enabled_response.json
@@ -0,0 +1,20 @@
+{
+  "repository": {
+    "enabledAnalyzers": {
+      "edges": [
+        {
+          "node": {
+            "name": "Go",
+            "shortcode": "go"
+          }
+        },
+        {
+          "node": {
+            "name": "Python",
+            "shortcode": "python"
+          }
+        }
+      ]
+    }
+  }
+}
diff --git a/command/repository/dashboard/dashboard.go b/command/repository/dashboard/dashboard.go
new file mode 100644
index 00000000..11987885
--- /dev/null
+++ b/command/repository/dashboard/dashboard.go
@@ -0,0 +1,61 @@
+package dashboard
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/MakeNowJust/heredoc"
+	"github.com/cli/browser"
+	"github.com/deepsourcelabs/cli/config"
+	"github.com/deepsourcelabs/cli/internal/cli/args"
+	"github.com/deepsourcelabs/cli/internal/cli/completion"
+	"github.com/deepsourcelabs/cli/internal/cli/style"
+	reposvc "github.com/deepsourcelabs/cli/internal/services/repo"
+	"github.com/spf13/cobra"
+)
+
+type DashboardOptions struct {
+	RepoArg string
+}
+
+func NewCmdDashboard() *cobra.Command {
+	opts := DashboardOptions{}
+
+	doc := heredoc.Docf(`
+		Open the DeepSource dashboard for a repository.
+
+		Run %[1]s to open the dashboard in your browser.
+
+		To open the dashboard for a specific repository, use the %[2]s flag:
+		%[3]s
+	`, style.Cyan("deepsource repository dashboard"), style.Yellow("--repo"), style.Cyan("deepsource repository dashboard --repo owner/repo"))
+
+	cmd := &cobra.Command{
+		Use:   "dashboard",
+		Short: "Open the DeepSource dashboard for the current repository",
+		Long:  doc,
+		Args:  args.NoArgs,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			return opts.Run()
+		},
+	}
+
+	cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Open the dashboard for a specific repository")
+	_ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp
+	})
+
+	return cmd
+}
+
+func (opts *DashboardOptions) Run() error {
+	ctx := context.Background()
+	svc := reposvc.NewService(config.DefaultManager())
+	dashboardURL, err := svc.ViewURL(ctx, opts.RepoArg)
+	if err != nil {
+		return err
+	}
+
+	fmt.Printf("Opening %s in your browser...\n", dashboardURL)
+	return browser.OpenURL(dashboardURL)
+}
diff --git a/command/repository/repository.go b/command/repository/repository.go
new file mode 100644
index 00000000..15e570f9
--- /dev/null
+++ b/command/repository/repository.go
@@ -0,0 +1,21 @@
+package repository
+
+import (
+	"github.com/spf13/cobra"
+
+	"github.com/deepsourcelabs/cli/command/repository/analyzers"
+	"github.com/deepsourcelabs/cli/command/repository/dashboard"
+	"github.com/deepsourcelabs/cli/command/repository/status"
+)
+
+func NewCmdRepository() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:     "repository",
+		Aliases: []string{"repo"},
+		Short:   "Manage repository settings",
+	}
+	cmd.AddCommand(dashboard.NewCmdDashboard())
+	cmd.AddCommand(status.NewCmdRepoStatus())
+	cmd.AddCommand(analyzers.NewCmdAnalyzers())
+	return cmd
+}
diff --git a/command/repository/status/status.go b/command/repository/status/status.go
new file mode 100644
index 00000000..a5265330
--- /dev/null
+++ b/command/repository/status/status.go
@@ -0,0 +1,135 @@
+package status
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+
+	"github.com/MakeNowJust/heredoc"
+	"github.com/deepsourcelabs/cli/command/cmddeps"
+	"github.com/deepsourcelabs/cli/config"
+	"github.com/deepsourcelabs/cli/internal/cli/args"
+	"github.com/deepsourcelabs/cli/internal/cli/completion"
+	"github.com/deepsourcelabs/cli/internal/cli/style"
+	reposvc "github.com/deepsourcelabs/cli/internal/services/repo"
+	"github.com/deepsourcelabs/cli/internal/vcs"
+	"github.com/pterm/pterm"
+	"github.com/spf13/cobra"
+	"gopkg.in/yaml.v3"
+)
+
+type RepoStatusOptions struct {
+	RepoArg        string
+	TokenExpired   bool
+	SelectedRemote *vcs.RemoteData
+	Output         string
+	deps           *cmddeps.Deps
+}
+
+func (opts *RepoStatusOptions) stdout() io.Writer {
+	if opts.deps != nil && opts.deps.Stdout != nil {
+		return opts.deps.Stdout
+	}
+	return os.Stdout
+}
+
+func NewCmdRepoStatus() *cobra.Command {
+	return NewCmdRepoStatusWithDeps(nil)
+}
+
+func NewCmdRepoStatusWithDeps(deps *cmddeps.Deps) *cobra.Command {
+	opts := RepoStatusOptions{
+		RepoArg:      "",
+		TokenExpired: false,
+		deps:         deps,
+	}
+
+	doc := heredoc.Docf(`
+		View the activation status for the repository.
+
+		To check if the current repository is activated on DeepSource, run:
+		%[1]s
+
+		To check if a specific repository is activated on DeepSource, use the %[2]s flag:
+		%[3]s
+	`, style.Cyan("deepsource repository status"), style.Yellow("--repo"), style.Cyan("deepsource repository status --repo repo_name"))
+
+	cmd := &cobra.Command{
+		Use:   "status",
+		Short: "View the activation status for the repository.",
+		Long:  doc,
+		Args:  args.NoArgs,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			return opts.Run()
+		},
+	}
+
+	cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Get the activation status of the specified repository")
+	cmd.Flags().StringVar(&opts.Output, "output", "pretty", "Output format: pretty, table, json, yaml")
+	_ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp
+	})
+	_ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		return []string{
+			"pretty\tPretty-printed output",
+			"table\tHuman-readable table",
+			"json\tJSON output",
+			"yaml\tYAML output",
+		}, cobra.ShellCompDirectiveNoFileComp
+	})
+	return cmd
+}
+
+func (opts *RepoStatusOptions) Run() (err error) {
+	ctx := context.Background()
+	var cfgMgr *config.Manager
+	if opts.deps != nil && opts.deps.ConfigMgr != nil {
+		cfgMgr = opts.deps.ConfigMgr
+	} else {
+		cfgMgr = config.DefaultManager()
+	}
+	var svc *reposvc.Service
+	if opts.deps != nil && opts.deps.RepoService != nil {
+		svc = opts.deps.RepoService
+	} else {
+		svc = reposvc.NewService(cfgMgr)
+	}
+	result, err := svc.Status(ctx, opts.RepoArg)
+	if err != nil {
+		return err
+	}
+	opts.SelectedRemote = result.Remote
+
+	return opts.printStatus(result)
+}
+
+func (opts *RepoStatusOptions) printStatus(result *reposvc.StatusResult) error {
+	w := opts.stdout()
+	switch opts.Output {
+	case "", "pretty", "table":
+		if result.Activated {
+			pterm.Println("Analysis active on DeepSource (deepsource.com)")
+		} else {
+			pterm.Println("DeepSource analysis is currently not activated on this repository.")
+		}
+		return nil
+	case "json":
+		payload, err := json.MarshalIndent(result, "", "  ")
+		if err != nil {
+			return fmt.Errorf("DeepSource | Error | Failed to format JSON output: %w", err)
+		}
+		fmt.Fprintf(w, "%s\n", payload)
+		return nil
+	case "yaml":
+		payload, err := yaml.Marshal(result)
+		if err != nil {
+			return fmt.Errorf("DeepSource | Error | Failed to format YAML output: %w", err)
+		}
+		fmt.Fprint(w, string(payload))
+		return nil
+	default:
+		return fmt.Errorf("DeepSource | Error | Unsupported output format: %s", opts.Output)
+	}
+}
diff --git a/command/repository/status/tests/golden_files/activated_output.json b/command/repository/status/tests/golden_files/activated_output.json
new file mode 100644
index 00000000..c811e4a3
--- /dev/null
+++ b/command/repository/status/tests/golden_files/activated_output.json
@@ -0,0 +1,9 @@
+{
+  "Remote": {
+    "Owner": "testowner",
+    "RepoName": "testrepo",
+    "VCSProvider": "GITHUB"
+  },
+  "Activated": true,
+  "Host": "deepsource.com"
+}
diff --git a/command/repository/status/tests/golden_files/activated_response.json b/command/repository/status/tests/golden_files/activated_response.json
new file mode 100644
index 00000000..09c16443
--- /dev/null
+++ b/command/repository/status/tests/golden_files/activated_response.json
@@ -0,0 +1,5 @@
+{
+  "repository": {
"isActivated": true + } +} diff --git a/command/repository/status/tests/golden_files/not_activated_output.json b/command/repository/status/tests/golden_files/not_activated_output.json new file mode 100644 index 00000000..b16b441d --- /dev/null +++ b/command/repository/status/tests/golden_files/not_activated_output.json @@ -0,0 +1,9 @@ +{ + "Remote": { + "Owner": "testowner", + "RepoName": "testrepo", + "VCSProvider": "GITHUB" + }, + "Activated": false, + "Host": "deepsource.com" +} diff --git a/command/repository/status/tests/golden_files/not_activated_response.json b/command/repository/status/tests/golden_files/not_activated_response.json new file mode 100644 index 00000000..e6a6519d --- /dev/null +++ b/command/repository/status/tests/golden_files/not_activated_response.json @@ -0,0 +1,5 @@ +{ + "repository": { + "isActivated": false + } +} diff --git a/command/repository/status/tests/status_test.go b/command/repository/status/tests/status_test.go new file mode 100644 index 00000000..30a2e22a --- /dev/null +++ b/command/repository/status/tests/status_test.go @@ -0,0 +1,84 @@ +package tests + +import ( + "bytes" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/command/cmddeps" + statusCmd "github.com/deepsourcelabs/cli/command/repository/status" + "github.com/deepsourcelabs/cli/deepsource" + reposvc "github.com/deepsourcelabs/cli/internal/services/repo" + "github.com/deepsourcelabs/cli/internal/testutil" +) + +func goldenPath(name string) string { + _, callerFile, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(callerFile), "golden_files", name) +} + +func TestRepoStatusActivated(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "isActivated": goldenPath("activated_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + svc := reposvc.NewTestService(cfgMgr, func(opts deepsource.ClientOpts) (reposvc.Client, error) { + return client, nil + }) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + ConfigMgr: cfgMgr, + Stdout: &buf, + RepoService: svc, + } + + cmd := statusCmd.NewCmdRepoStatusWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("activated_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestRepoStatusNotActivated(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "isActivated": goldenPath("not_activated_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + svc := reposvc.NewTestService(cfgMgr, func(opts deepsource.ClientOpts) (reposvc.Client, error) { + return client, nil + }) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + ConfigMgr: cfgMgr, + Stdout: &buf, + RepoService: svc, + } + + cmd := statusCmd.NewCmdRepoStatusWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("not_activated_output.json"))) + got := buf.String() + + if 
strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} diff --git a/command/root.go b/command/root.go index 73e323b3..c0ab1464 100644 --- a/command/root.go +++ b/command/root.go @@ -1,39 +1,61 @@ package command import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/buildinfo" "github.com/deepsourcelabs/cli/command/auth" - "github.com/deepsourcelabs/cli/command/config" + "github.com/deepsourcelabs/cli/command/analysis" "github.com/deepsourcelabs/cli/command/issues" - "github.com/deepsourcelabs/cli/command/repo" + "github.com/deepsourcelabs/cli/command/metrics" + "github.com/deepsourcelabs/cli/command/repository" "github.com/deepsourcelabs/cli/command/report" - "github.com/deepsourcelabs/cli/command/version" + "github.com/deepsourcelabs/cli/command/vulnerabilities" "github.com/spf13/cobra" ) func NewCmdRoot() *cobra.Command { cmd := &cobra.Command{ - Use: "deepsource [flags]", + Use: buildinfo.AppName + " [flags]", Short: "DeepSource CLI", - Long: `Welcome to DeepSource CLI -Now ship good code directly from the command line. - -Login into DeepSource using the command : deepsource auth login`, + Long: "DeepSource CLI - Ship good code from the command line.\n\nTo get started, run: " + buildinfo.AppName + " auth login", SilenceErrors: true, SilenceUsage: true, } + // Set version using --version flag + info := buildinfo.GetBuildInfo() + if info != nil { + cmd.Version = info.Version + cmd.SetVersionTemplate(fmt.Sprintf("DeepSource CLI %s (%s)\n", info.Version, info.GitCommit)) + } + + // Disable default completion command + cmd.CompletionOptions.DisableDefaultCmd = true + + // Hide help subcommand (--help flag still works) + cmd.SetHelpCommand(&cobra.Command{Hidden: true}) + // Child Commands - cmd.AddCommand(version.NewCmdVersion()) - cmd.AddCommand(config.NewCmdConfig()) cmd.AddCommand(auth.NewCmdAuth()) - cmd.AddCommand(repo.NewCmdRepo()) - cmd.AddCommand(issues.NewCmdIssues()) + cmd.AddCommand(repository.NewCmdRepository()) + cmd.AddCommand(analysis.NewCmdAnalysis()) cmd.AddCommand(report.NewCmdReport()) + cmd.AddCommand(issues.NewCmdIssues()) + cmd.AddCommand(metrics.NewCmdMetrics()) + cmd.AddCommand(vulnerabilities.NewCmdVulnerabilities()) return cmd } func Execute() error { + return ExecuteContext(context.Background()) +} + +// ExecuteContext runs the root command with a parent context. +func ExecuteContext(ctx context.Context) error { cmd := NewCmdRoot() + cmd.SetContext(ctx) return cmd.Execute() } diff --git a/command/version/command.go b/command/version/command.go deleted file mode 100644 index b7fff01c..00000000 --- a/command/version/command.go +++ /dev/null @@ -1,45 +0,0 @@ -package version - -import ( - "fmt" - - "github.com/deepsourcelabs/cli/utils" - "github.com/deepsourcelabs/cli/version" - "github.com/spf13/cobra" -) - -// Options holds the metadata. -type Options struct{} - -// For testing. TODO: cleanup -var getBuildInfo = version.GetBuildInfo - -// NewCmdVersion returns the current version of cli being used -func NewCmdVersion() *cobra.Command { - cmd := &cobra.Command{ - Use: "version", - Short: "Get the version of the DeepSource CLI", - Args: utils.NoArgs, - Run: func(cmd *cobra.Command, args []string) { - o := Options{} - fmt.Println(o.Run()) - }, - SilenceErrors: true, - SilenceUsage: true, - } - return cmd -} - -// Validate impletments the Validate method for the ICommand interface. -func (Options) Validate() error { - return nil -} - -// Run executest the command. 
-func (Options) Run() string { - buildInfo := getBuildInfo() - if buildInfo == nil { - return "" - } - return getBuildInfo().String() -} diff --git a/command/version/command_test.go b/command/version/command_test.go deleted file mode 100644 index 661ec296..00000000 --- a/command/version/command_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package version - -import ( - "testing" - "time" - - "github.com/deepsourcelabs/cli/version" -) - -func TestOptions_Run(t *testing.T) { - date, _ := time.Parse("2006-01-02", "2021-01-21") - - getBuildInfo = func() *version.BuildInfo { - return &version.BuildInfo{ - Version: "1.5.0", - Date: date, - } - } - - tests := []struct { - name string - o Options - want string - }{ - { - name: "must return the string output for command", - o: Options{}, - want: "DeepSource CLI version 1.5.0 (2021-01-21)", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - o := Options{} - if got := o.Run(); got != tt.want { - t.Errorf("Options.Run() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/command/vulnerabilities/tests/golden_files/pr_vulns_output.json b/command/vulnerabilities/tests/golden_files/pr_vulns_output.json new file mode 100644 index 00000000..d85241f3 --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/pr_vulns_output.json @@ -0,0 +1,29 @@ +{ + "number": 42, + "title": "Upgrade dependencies to fix vulnerabilities", + "base_branch": "main", + "branch": "fix/upgrade-deps", + "vulnerabilities": [ + { + "id": "vuln-occ-pr-001", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-3333", + "severity": "MEDIUM", + "summary": "Information disclosure via error messages", + "cvss_v3_base_score": 5.3, + "fixed_versions": [ + "4.2.0" + ] + }, + "package": { + "name": "web-framework", + "ecosystem": "NPM" + }, + "package_version": { + "version": "4.1.2" + } + } + ] +} diff --git a/command/vulnerabilities/tests/golden_files/pr_vulns_response.json b/command/vulnerabilities/tests/golden_files/pr_vulns_response.json new file mode 100644 index 00000000..6a507ec2 --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/pr_vulns_response.json @@ -0,0 +1,35 @@ +{ + "repository": { + "pullRequest": { + "number": 42, + "title": "Upgrade dependencies to fix vulnerabilities", + "baseBranch": "main", + "branch": "fix/upgrade-deps", + "vulnerabilityOccurrences": { + "edges": [ + { + "node": { + "id": "vuln-occ-pr-001", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-3333", + "severity": "MEDIUM", + "summary": "Information disclosure via error messages", + "cvssV3BaseScore": 5.3, + "fixedVersions": ["4.2.0"] + }, + "package": { + "name": "web-framework", + "ecosystem": "NPM" + }, + "packageVersion": { + "version": "4.1.2" + } + } + } + ] + } + } + } +} diff --git a/command/vulnerabilities/tests/golden_files/repo_vulns_output.json b/command/vulnerabilities/tests/golden_files/repo_vulns_output.json new file mode 100644 index 00000000..9d62f11a --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/repo_vulns_output.json @@ -0,0 +1,45 @@ +[ + { + "id": "vuln-occ-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-1234", + "severity": "CRITICAL", + "summary": "Remote code execution via crafted input", + "cvss_v3_base_score": 9.8, + "fixed_versions": [ + "1.2.4", + "1.3.0" + ] + }, + "package": { + "name": "example-lib", + "ecosystem": "GO" + }, + "package_version": { + 
"version": "1.2.3" + } + }, + { + "id": "vuln-occ-002", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-5678", + "severity": "HIGH", + "summary": "Denial of service through unbounded allocation", + "cvss_v3_base_score": 7.5, + "fixed_versions": [ + "2.0.1" + ] + }, + "package": { + "name": "another-pkg", + "ecosystem": "NPM" + }, + "package_version": { + "version": "2.0.0" + } + } +] diff --git a/command/vulnerabilities/tests/golden_files/repo_vulns_response.json b/command/vulnerabilities/tests/golden_files/repo_vulns_response.json new file mode 100644 index 00000000..6dda9088 --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/repo_vulns_response.json @@ -0,0 +1,50 @@ +{ + "repository": { + "dependencyVulnerabilityOccurrences": { + "edges": [ + { + "node": { + "id": "vuln-occ-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-1234", + "severity": "CRITICAL", + "summary": "Remote code execution via crafted input", + "cvssV3BaseScore": 9.8, + "fixedVersions": ["1.2.4", "1.3.0"] + }, + "package": { + "name": "example-lib", + "ecosystem": "GO" + }, + "packageVersion": { + "version": "1.2.3" + } + } + }, + { + "node": { + "id": "vuln-occ-002", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-5678", + "severity": "HIGH", + "summary": "Denial of service through unbounded allocation", + "cvssV3BaseScore": 7.5, + "fixedVersions": ["2.0.1"] + }, + "package": { + "name": "another-pkg", + "ecosystem": "NPM" + }, + "packageVersion": { + "version": "2.0.0" + } + } + } + ] + } + } +} diff --git a/command/vulnerabilities/tests/golden_files/run_vulns_output.json b/command/vulnerabilities/tests/golden_files/run_vulns_output.json new file mode 100644 index 00000000..cbeb50b5 --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/run_vulns_output.json @@ -0,0 +1,28 @@ +{ + "commit_oid": "abc123f0deadbeef1234567890abcdef12345678", + "branch_name": "main", + "status": "SUCCESS", + "vulnerabilities": [ + { + "id": "vuln-occ-run-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-9999", + "severity": "HIGH", + "summary": "Path traversal in file upload handler", + "cvss_v3_base_score": 8.1, + "fixed_versions": [ + "3.1.0" + ] + }, + "package": { + "name": "file-handler", + "ecosystem": "PYPI" + }, + "package_version": { + "version": "3.0.5" + } + } + ] +} diff --git a/command/vulnerabilities/tests/golden_files/run_vulns_response.json b/command/vulnerabilities/tests/golden_files/run_vulns_response.json new file mode 100644 index 00000000..cc990e6a --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/run_vulns_response.json @@ -0,0 +1,40 @@ +{ + "run": { + "commitOid": "abc123f0deadbeef1234567890abcdef12345678", + "branchName": "main", + "status": "SUCCESS", + "scaChecks": { + "edges": [ + { + "node": { + "vulnerabilityOccurrences": { + "edges": [ + { + "node": { + "id": "vuln-occ-run-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-9999", + "severity": "HIGH", + "summary": "Path traversal in file upload handler", + "cvssV3BaseScore": 8.1, + "fixedVersions": ["3.1.0"] + }, + "package": { + "name": "file-handler", + "ecosystem": "PYPI" + }, + "packageVersion": { + "version": "3.0.5" + } + } + } + ] + } + } + } + ] + } + } +} diff --git 
a/command/vulnerabilities/tests/golden_files/severity_filter_output.json b/command/vulnerabilities/tests/golden_files/severity_filter_output.json new file mode 100644 index 00000000..b7048b4e --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/severity_filter_output.json @@ -0,0 +1,44 @@ +[ + { + "id": "vuln-occ-sev-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-1111", + "severity": "CRITICAL", + "summary": "SQL injection in query builder", + "cvss_v3_base_score": 9.1, + "fixed_versions": [ + "5.0.1" + ] + }, + "package": { + "name": "db-driver", + "ecosystem": "GO" + }, + "package_version": { + "version": "5.0.0" + } + }, + { + "id": "vuln-occ-sev-002", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-2222", + "severity": "HIGH", + "summary": "Buffer overflow in parser", + "cvss_v3_base_score": 7.8, + "fixed_versions": [ + "2.5.0" + ] + }, + "package": { + "name": "parser-lib", + "ecosystem": "CARGO" + }, + "package_version": { + "version": "2.4.3" + } + } +] diff --git a/command/vulnerabilities/tests/golden_files/severity_filter_response.json b/command/vulnerabilities/tests/golden_files/severity_filter_response.json new file mode 100644 index 00000000..f5d64b49 --- /dev/null +++ b/command/vulnerabilities/tests/golden_files/severity_filter_response.json @@ -0,0 +1,71 @@ +{ + "repository": { + "dependencyVulnerabilityOccurrences": { + "edges": [ + { + "node": { + "id": "vuln-occ-sev-001", + "reachability": "REACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-1111", + "severity": "CRITICAL", + "summary": "SQL injection in query builder", + "cvssV3BaseScore": 9.1, + "fixedVersions": ["5.0.1"] + }, + "package": { + "name": "db-driver", + "ecosystem": "GO" + }, + "packageVersion": { + "version": "5.0.0" + } + } + }, + { + "node": { + "id": "vuln-occ-sev-002", + "reachability": "UNREACHABLE", + "fixability": "FIXABLE", + "vulnerability": { + "identifier": "CVE-2024-2222", + "severity": "HIGH", + "summary": "Buffer overflow in parser", + "cvssV3BaseScore": 7.8, + "fixedVersions": ["2.5.0"] + }, + "package": { + "name": "parser-lib", + "ecosystem": "CARGO" + }, + "packageVersion": { + "version": "2.4.3" + } + } + }, + { + "node": { + "id": "vuln-occ-sev-003", + "reachability": "UNREACHABLE", + "fixability": "UNFIXABLE", + "vulnerability": { + "identifier": "CVE-2024-4444", + "severity": "LOW", + "summary": "Timing side-channel in comparison", + "cvssV3BaseScore": 3.1, + "fixedVersions": [] + }, + "package": { + "name": "crypto-utils", + "ecosystem": "NPM" + }, + "packageVersion": { + "version": "1.0.0" + } + } + } + ] + } + } +} diff --git a/command/vulnerabilities/tests/vulns_test.go b/command/vulnerabilities/tests/vulns_test.go new file mode 100644 index 00000000..afc8c0a4 --- /dev/null +++ b/command/vulnerabilities/tests/vulns_test.go @@ -0,0 +1,135 @@ +package tests + +import ( + "bytes" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/command/cmddeps" + vulnsCmd "github.com/deepsourcelabs/cli/command/vulnerabilities" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/internal/testutil" +) + +func goldenPath(name string) string { + _, callerFile, _, _ := runtime.Caller(0) + return filepath.Join(filepath.Dir(callerFile), "golden_files", name) +} + +func TestVulnsDefaultBranch(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", 
"deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "dependencyVulnerabilityOccurrences(first: $limit)": goldenPath("repo_vulns_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := vulnsCmd.NewCmdVulnerabilitiesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("repo_vulns_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestVulnsCommitScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "scaChecks {": goldenPath("run_vulns_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := vulnsCmd.NewCmdVulnerabilitiesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--commit", "abc123f", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("run_vulns_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestVulnsPRScope(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "vulnerabilityOccurrences(first: $limit)": goldenPath("pr_vulns_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := vulnsCmd.NewCmdVulnerabilitiesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--pr", "42", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("pr_vulns_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} + +func TestVulnsFilterBySeverity(t *testing.T) { + cfgMgr := testutil.CreateTestConfigManager(t, "test-token", "deepsource.com", "test@example.com") + mock := testutil.MockQueryFunc(t, map[string]string{ + "dependencyVulnerabilityOccurrences(first: $limit)": goldenPath("severity_filter_response.json"), + }) + client := deepsource.NewWithGraphQLClient(mock) + + var buf bytes.Buffer + deps := &cmddeps.Deps{ + Client: client, + ConfigMgr: cfgMgr, + Stdout: &buf, + } + + cmd := vulnsCmd.NewCmdVulnerabilitiesWithDeps(deps) + cmd.SetArgs([]string{"--repo", "gh/testowner/testrepo", "--severity", "critical,high", "--output", "json"}) + + if err := cmd.Execute(); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + expected := string(testutil.LoadGoldenFile(t, goldenPath("severity_filter_output.json"))) + got := buf.String() + + if strings.TrimSpace(got) != 
strings.TrimSpace(expected) { + t.Errorf("output mismatch.\nExpected:\n%s\nGot:\n%s", expected, got) + } +} diff --git a/command/vulnerabilities/vulnerabilities.go b/command/vulnerabilities/vulnerabilities.go new file mode 100644 index 00000000..580c71c5 --- /dev/null +++ b/command/vulnerabilities/vulnerabilities.go @@ -0,0 +1,535 @@ +package vulnerabilities + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "strings" + + "github.com/MakeNowJust/heredoc" + "github.com/deepsourcelabs/cli/command/cmddeps" + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + "github.com/deepsourcelabs/cli/deepsource/vulnerabilities" + "github.com/deepsourcelabs/cli/internal/cli/completion" + "github.com/deepsourcelabs/cli/internal/cli/style" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + "github.com/deepsourcelabs/cli/internal/vcs" + "github.com/pterm/pterm" + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" +) + +type VulnerabilitiesOptions struct { + RepoArg string + CommitOid string + PRNumber int + OutputFormat string + OutputFile string + Verbose bool + LimitArg int + SeverityFilters []string + repoSlug string + repoVulns []vulnerabilities.VulnerabilityOccurrence + runVulns *vulnerabilities.RunVulns + prVulns *vulnerabilities.PRVulns + deps *cmddeps.Deps +} + +func (opts *VulnerabilitiesOptions) stdout() io.Writer { + if opts.deps != nil && opts.deps.Stdout != nil { + return opts.deps.Stdout + } + return os.Stdout +} + +func NewCmdVulnerabilities() *cobra.Command { + return NewCmdVulnerabilitiesWithDeps(nil) +} + +func NewCmdVulnerabilitiesWithDeps(deps *cmddeps.Deps) *cobra.Command { + opts := VulnerabilitiesOptions{ + OutputFormat: "pretty", + LimitArg: 100, + deps: deps, + } + + doc := heredoc.Docf(` + View dependency vulnerabilities for a repository. + + By default, shows vulnerabilities from the default branch. Use %[1]s or %[2]s + to scope to a specific analysis run or pull request. 
+ + Examples: + %[3]s + %[4]s + %[5]s + %[6]s + %[7]s + `, + style.Yellow("--commit"), + style.Yellow("--pr"), + style.Cyan("deepsource vulnerabilities"), + style.Cyan("deepsource vulnerabilities --repo owner/repo"), + style.Cyan("deepsource vulnerabilities --commit abc123f"), + style.Cyan("deepsource vulnerabilities --pr 123"), + style.Cyan("deepsource vulnerabilities --severity critical,high"), + ) + + cmd := &cobra.Command{ + Use: "vulnerabilities [flags]", + Short: "View dependency vulnerabilities", + Long: doc, + RunE: func(cmd *cobra.Command, args []string) error { + return opts.Run(cmd.Context()) + }, + } + + // --repo, -r flag + cmd.Flags().StringVarP(&opts.RepoArg, "repo", "r", "", "Repository (owner/name)") + + // Scoping flags + cmd.Flags().StringVar(&opts.CommitOid, "commit", "", "Scope to a specific analysis run by commit OID") + cmd.Flags().IntVar(&opts.PRNumber, "pr", 0, "Scope to a specific pull request by number") + + // --output flag + cmd.Flags().StringVarP(&opts.OutputFormat, "output", "o", "pretty", "Output format: pretty, table, json, yaml") + + // --output-file flag + cmd.Flags().StringVar(&opts.OutputFile, "output-file", "", "Write output to a file instead of stdout") + + // --verbose, -v flag + cmd.Flags().BoolVarP(&opts.Verbose, "verbose", "v", false, "Show CVSS score, summary, fix versions, and reachability") + + // --limit flag + cmd.Flags().IntVarP(&opts.LimitArg, "limit", "l", 100, "Maximum number of vulnerabilities to fetch") + + // --severity filter flag + cmd.Flags().StringSliceVar(&opts.SeverityFilters, "severity", nil, "Filter by severity (e.g. critical,high)") + + // Completions + _ = cmd.RegisterFlagCompletionFunc("repo", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return completion.RepoCompletionCandidates(), cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("output", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "pretty\tPretty-printed grouped output", + "table\tTabular output", + "json\tJSON output", + "yaml\tYAML output", + }, cobra.ShellCompDirectiveNoFileComp + }) + _ = cmd.RegisterFlagCompletionFunc("severity", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"critical", "high", "medium", "low", "none"}, cobra.ShellCompDirectiveNoFileComp + }) + + // Mutual exclusivity + cmd.MarkFlagsMutuallyExclusive("commit", "pr") + + return cmd +} + +func (opts *VulnerabilitiesOptions) Run(ctx context.Context) error { + // Load configuration + var cfgMgr *config.Manager + if opts.deps != nil && opts.deps.ConfigMgr != nil { + cfgMgr = opts.deps.ConfigMgr + } else { + cfgMgr = config.DefaultManager() + } + cfg, err := cfgMgr.Load() + if err != nil { + return clierrors.NewCLIError(clierrors.ErrInvalidConfig, "Error reading DeepSource CLI config", err) + } + if err := cfg.VerifyAuthentication(); err != nil { + return err + } + + // Resolve remote repository + remote, err := vcs.ResolveRemote(opts.RepoArg) + if err != nil { + return err + } + opts.repoSlug = remote.Owner + "/" + remote.RepoName + + // Create DeepSource client + var client *deepsource.Client + if opts.deps != nil && opts.deps.Client != nil { + client = opts.deps.Client + } else { + client, err = deepsource.New(deepsource.ClientOpts{ + Token: cfg.Token, + HostName: cfg.Host, + OnTokenRefreshed: cfgMgr.TokenRefreshCallback(), + }) + if err != nil { + return err + } + } + + // Fetch 
vulnerabilities based on scope + switch { + case opts.CommitOid != "": + opts.runVulns, err = client.GetRunVulns(ctx, opts.CommitOid, opts.LimitArg) + case opts.PRNumber > 0: + opts.prVulns, err = client.GetPRVulns(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.PRNumber, opts.LimitArg) + default: + opts.repoVulns, err = client.GetRepoVulns(ctx, remote.Owner, remote.RepoName, remote.VCSProvider, opts.LimitArg) + } + if err != nil { + return err + } + + // Apply severity filter if provided + opts.applyFilters() + + // Output based on format + switch opts.OutputFormat { + case "json": + return opts.outputJSON() + case "yaml": + return opts.outputYAML() + case "table": + return opts.outputTable() + default: + return opts.outputHuman() + } +} + +func (opts *VulnerabilitiesOptions) getVulns() []vulnerabilities.VulnerabilityOccurrence { + switch { + case opts.runVulns != nil: + return opts.runVulns.Vulns + case opts.prVulns != nil: + return opts.prVulns.Vulns + default: + return opts.repoVulns + } +} + +func (opts *VulnerabilitiesOptions) applyFilters() { + if len(opts.SeverityFilters) == 0 { + return + } + + severitySet := make(map[string]struct{}) + for _, s := range opts.SeverityFilters { + severitySet[strings.ToUpper(strings.TrimSpace(s))] = struct{}{} + } + + filterVulns := func(vulnsList []vulnerabilities.VulnerabilityOccurrence) []vulnerabilities.VulnerabilityOccurrence { + filtered := make([]vulnerabilities.VulnerabilityOccurrence, 0) + for _, v := range vulnsList { + if _, ok := severitySet[strings.ToUpper(v.Vulnerability.Severity)]; ok { + filtered = append(filtered, v) + } + } + return filtered + } + + switch { + case opts.runVulns != nil: + opts.runVulns.Vulns = filterVulns(opts.runVulns.Vulns) + case opts.prVulns != nil: + opts.prVulns.Vulns = filterVulns(opts.prVulns.Vulns) + default: + opts.repoVulns = filterVulns(opts.repoVulns) + } +} + +func (opts *VulnerabilitiesOptions) hasFilters() bool { + return len(opts.SeverityFilters) > 0 +} + +func (opts *VulnerabilitiesOptions) outputHuman() error { + vulnsList := opts.getVulns() + + if len(vulnsList) == 0 { + if opts.hasFilters() { + pterm.Info.Println("No vulnerabilities matched the provided filters.") + } else { + pterm.Info.Println("No vulnerabilities found.") + } + return nil + } + + severityOrder := []string{"CRITICAL", "HIGH", "MEDIUM", "LOW", "NONE"} + groups := make(map[string][]vulnerabilities.VulnerabilityOccurrence) + for _, v := range vulnsList { + sev := strings.ToUpper(v.Vulnerability.Severity) + groups[sev] = append(groups[sev], v) + } + + for _, sev := range severityOrder { + group, ok := groups[sev] + if !ok || len(group) == 0 { + continue + } + + header := fmt.Sprintf("%s (%d)", humanizeSeverity(sev), len(group)) + fmt.Println(colorSeverity(sev, header)) + fmt.Println() + + for _, v := range group { + ecosystem := v.Package.Ecosystem + if ecosystem == "" { + ecosystem = "unknown" + } + fmt.Printf(" %s: %s@%s (%s)\n", + v.Vulnerability.Identifier, + v.Package.Name, + v.PackageVersion.Version, + ecosystem, + ) + + if opts.Verbose { + if v.Vulnerability.CvssV3BaseScore != nil { + fmt.Printf(" CVSS: %.1f\n", *v.Vulnerability.CvssV3BaseScore) + } + if v.Vulnerability.Summary != "" { + fmt.Printf(" %s\n", v.Vulnerability.Summary) + } + if len(v.Vulnerability.FixedVersions) > 0 { + fmt.Printf(" Fixed in: %s\n", strings.Join(v.Vulnerability.FixedVersions, ", ")) + } + if v.Reachability != "" { + fmt.Printf(" Reachability: %s\n", strings.ToLower(v.Reachability)) + } + if v.Fixability != "" { + fmt.Printf(" 
Fixability: %s\n", strings.ToLower(v.Fixability)) + } + fmt.Println() + } + } + + if !opts.Verbose { + fmt.Println() + } + } + + fmt.Printf("Showing %d vulnerability(ies) in %s", len(vulnsList), opts.repoSlug) + switch { + case opts.runVulns != nil: + commitShort := opts.runVulns.CommitOid + if len(commitShort) > 7 { + commitShort = commitShort[:7] + } + fmt.Printf(" from commit %s\n", commitShort) + case opts.prVulns != nil: + fmt.Printf(" from PR #%d\n", opts.prVulns.Number) + default: + fmt.Println(" from default branch") + } + return nil +} + +func (opts *VulnerabilitiesOptions) outputTable() error { + vulnsList := opts.getVulns() + + if len(vulnsList) == 0 { + if opts.hasFilters() { + pterm.Info.Println("No vulnerabilities matched the provided filters.") + } else { + pterm.Info.Println("No vulnerabilities found.") + } + return nil + } + + // Show context header for run/PR scopes + if opts.runVulns != nil { + commitShort := opts.runVulns.CommitOid + if len(commitShort) > 8 { + commitShort = commitShort[:8] + } + pterm.DefaultBox.WithTitle("Run Vulnerabilities").WithTitleTopCenter().Println( + fmt.Sprintf("%s %s\n%s %s", + pterm.Bold.Sprint("Commit:"), + commitShort, + pterm.Bold.Sprint("Branch:"), + opts.runVulns.BranchName, + ), + ) + pterm.Println() + } else if opts.prVulns != nil { + pterm.DefaultBox.WithTitle("Pull Request Vulnerabilities").WithTitleTopCenter().Println( + fmt.Sprintf("%s #%d\n%s %s", + pterm.Bold.Sprint("PR:"), + opts.prVulns.Number, + pterm.Bold.Sprint("Branch:"), + opts.prVulns.Branch, + ), + ) + pterm.Println() + } + + // Build vulnerabilities table + header := []string{"ID", "Severity", "Package", "Version", "Ecosystem", "Fix", "Reachability"} + data := [][]string{header} + + for _, v := range vulnsList { + fix := "-" + if len(v.Vulnerability.FixedVersions) > 0 { + fix = v.Vulnerability.FixedVersions[0] + } + + reachable := formatReachability(v.Reachability) + severity := formatSeverity(v.Vulnerability.Severity) + ecosystem := humanizeEcosystem(v.Package.Ecosystem) + + data = append(data, []string{ + v.Vulnerability.Identifier, + severity, + v.Package.Name, + v.PackageVersion.Version, + ecosystem, + fix, + reachable, + }) + } + + pterm.DefaultTable.WithHasHeader().WithData(data).Render() + pterm.Printf("\nShowing %d vulnerability(ies)\n", len(vulnsList)) + + return nil +} + +func formatSeverity(severity string) string { + humanized := humanizeSeverity(severity) + switch strings.ToUpper(severity) { + case "CRITICAL": + return pterm.Red(humanized) + case "HIGH": + return pterm.LightRed(humanized) + case "MEDIUM": + return pterm.Yellow(humanized) + case "LOW": + return pterm.Blue(humanized) + default: + return humanized + } +} + +func formatReachability(reachability string) string { + switch strings.ToUpper(reachability) { + case "REACHABLE": + return pterm.Red("Yes") + case "UNREACHABLE": + return pterm.Green("No") + default: + return "Unknown" + } +} + +func humanizeEcosystem(ecosystem string) string { + switch strings.ToUpper(ecosystem) { + case "GO": + return "Go" + case "NPM": + return "npm" + case "PYPI": + return "PyPI" + case "MAVEN": + return "Maven" + case "RUBYGEMS": + return "RubyGems" + case "NUGET": + return "NuGet" + case "CARGO": + return "Cargo" + case "PACKAGIST": + return "Packagist" + default: + if ecosystem == "" { + return "-" + } + return ecosystem + } +} + +func humanizeSeverity(s string) string { + switch strings.ToUpper(s) { + case "CRITICAL": + return "Critical" + case "HIGH": + return "High" + case "MEDIUM": + return "Medium" + case 
"LOW": + return "Low" + case "NONE": + return "None" + default: + return s + } +} + +func colorSeverity(sev string, text string) string { + switch strings.ToUpper(sev) { + case "CRITICAL": + return pterm.Red(text) + case "HIGH": + return pterm.LightRed(text) + case "MEDIUM": + return pterm.Yellow(text) + case "LOW": + return pterm.Blue(text) + default: + return text + } +} + +func (opts *VulnerabilitiesOptions) outputJSON() error { + var data []byte + var err error + + switch { + case opts.runVulns != nil: + data, err = json.MarshalIndent(opts.runVulns, "", " ") + case opts.prVulns != nil: + data, err = json.MarshalIndent(opts.prVulns, "", " ") + default: + data, err = json.MarshalIndent(opts.repoVulns, "", " ") + } + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format JSON output", err) + } + return opts.writeOutput(data, true) +} + +func (opts *VulnerabilitiesOptions) outputYAML() error { + var data []byte + var err error + + switch { + case opts.runVulns != nil: + data, err = yaml.Marshal(opts.runVulns) + case opts.prVulns != nil: + data, err = yaml.Marshal(opts.prVulns) + default: + data, err = yaml.Marshal(opts.repoVulns) + } + if err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to format YAML output", err) + } + return opts.writeOutput(data, false) +} + +func (opts *VulnerabilitiesOptions) writeOutput(data []byte, trailingNewline bool) error { + if opts.OutputFile == "" { + w := opts.stdout() + if trailingNewline { + fmt.Fprintln(w, string(data)) + } else { + fmt.Fprint(w, string(data)) + } + return nil + } + + if err := os.WriteFile(opts.OutputFile, data, 0644); err != nil { + return clierrors.NewCLIError(clierrors.ErrAPIError, "Failed to write output file", err) + } + pterm.Printf("Saved vulnerabilities to %s!\n", opts.OutputFile) + return nil +} diff --git a/config/config.go b/config/config.go index d6746cb6..14f6a534 100644 --- a/config/config.go +++ b/config/config.go @@ -1,23 +1,15 @@ package config import ( - "errors" - "os" - "path/filepath" + "fmt" "time" - "github.com/pelletier/go-toml" -) - -var ( - configDirFn = os.UserHomeDir - readFileFn = os.ReadFile + "github.com/deepsourcelabs/cli/buildinfo" ) const ( - ConfigDirName = "/.deepsource/" ConfigFileName = "/config.toml" - DefaultHostName = "deepsource.io" + DefaultHostName = "deepsource.com" ) type CLIConfig struct { @@ -27,8 +19,6 @@ type CLIConfig struct { TokenExpiresIn time.Time `toml:"token_expires_in,omitempty"` } -var Cfg CLIConfig - // Sets the token expiry in the desired format // Sets the token expiry in the desired format func (cfg *CLIConfig) SetTokenExpiry(str string) { @@ -44,113 +34,9 @@ func (cfg CLIConfig) IsExpired() bool { return time.Now().After(cfg.TokenExpiresIn) } -// configDir returns the directory to store the config file. -func (CLIConfig) configDir() (string, error) { - home, err := configDirFn() - if err != nil { - return "", err - } - return filepath.Join(home, ConfigDirName), nil -} - -// configPath returns the file path to the config file. -func (cfg CLIConfig) configPath() (string, error) { - home, err := cfg.configDir() - if err != nil { - return "", err - } - return filepath.Join(home, ConfigFileName), nil -} - -// ReadFile reads the CLI config file. 
-func (cfg *CLIConfig) ReadConfigFile() error { - path, err := cfg.configPath() - if err != nil { - return err - } - - // check if config exists - _, err = os.Stat(path) - if err != nil { - return nil - } - - data, err := readFileFn(path) - if err != nil { - return err - } - err = toml.Unmarshal(data, cfg) - if err != nil { - return err - } - - return nil -} - -func GetConfig() (*CLIConfig, error) { - if Cfg.Token != "" { - return &Cfg, nil - } - - err := Cfg.ReadConfigFile() - if err != nil { - return &Cfg, err - } - return &Cfg, nil -} - -// WriteFile writes the CLI config to file. -func (cfg *CLIConfig) WriteFile() error { - data, err := toml.Marshal(cfg) - if err != nil { - return err - } - - configDir, err := cfg.configDir() - if err != nil { - return err - } - - if err := os.MkdirAll(configDir, os.ModePerm); err != nil { - return err - } - - path, err := cfg.configPath() - if err != nil { - return err - } - - // Create file - file, err := os.Create(path) - if err != nil { - return err - } - defer file.Close() - - _, err = file.Write(data) - - return err -} - -// Deletes the config during logging out user -func (cfg *CLIConfig) Delete() error { - path, err := cfg.configPath() - if err != nil { - return err - } - return os.Remove(path) -} - func (cfg *CLIConfig) VerifyAuthentication() error { - // Checking if the user has authenticated / logged in or not if cfg.Token == "" { - return errors.New("You are not logged into DeepSource. Run \"deepsource auth login\" to authenticate.") + return fmt.Errorf("You are not logged into DeepSource. Run \"%s auth login\" to authenticate.", buildinfo.AppName) } - - // // Check if the token has already expired - // if cfg.IsExpired() { - // return errors.New("The authentication has expired. Run \"deepsource auth refresh\" to refresh the credentials.") - // } - return nil } diff --git a/config/config_test.go b/config/config_test.go index 62002b6b..c8169db5 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -8,7 +8,7 @@ import ( ) var cfg = CLIConfig{ - Host: "deepsource.io", + Host: "deepsource.com", User: "test", Token: "test_token", TokenExpiresIn: time.Time{}, @@ -39,16 +39,6 @@ func TestIsExpired(t *testing.T) { assert.Equal(t, true, result) } -func TestConfigDir(t *testing.T) { - _, err := cfg.configDir() - assert.Nil(t, err) -} - -func TestConfigPath(t *testing.T) { - _, err := cfg.configPath() - assert.Nil(t, err) -} - func TestGetConfig(t *testing.T) { _, err := GetConfig() assert.Nil(t, err) diff --git a/config/manager.go b/config/manager.go new file mode 100644 index 00000000..ec00e7ed --- /dev/null +++ b/config/manager.go @@ -0,0 +1,177 @@ +package config + +import ( + "errors" + "os" + "path/filepath" + + "github.com/deepsourcelabs/cli/buildinfo" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/interfaces" + "github.com/deepsourcelabs/cli/internal/secrets" + "github.com/pelletier/go-toml" +) + +// Manager handles reading and writing CLI config. +type Manager struct { + fs interfaces.FileSystem + homeDir func() (string, error) + secrets secrets.Store + secretsKey string +} + +// NewManager creates a config manager with injected dependencies. +func NewManager(fs interfaces.FileSystem, homeDir func() (string, error)) *Manager { + return NewManagerWithSecrets(fs, homeDir, secrets.NoopStore{}, "") +} + +// NewManagerWithSecrets creates a config manager with a secrets store. 
+func NewManagerWithSecrets(fs interfaces.FileSystem, homeDir func() (string, error), store secrets.Store, key string) *Manager { + if key == "" { + key = buildinfo.KeychainKey + } + if store == nil { + store = secrets.NoopStore{} + } + return &Manager{fs: fs, homeDir: homeDir, secrets: store, secretsKey: key} +} + +// DefaultManager returns a manager using OS-backed dependencies. +func DefaultManager() *Manager { + return NewManagerWithSecrets(adapters.NewOSFileSystem(), os.UserHomeDir, secrets.DefaultStore(), "") +} + +func (m *Manager) configDir() (string, error) { + home, err := m.homeDir() + if err != nil { + return "", err + } + return filepath.Join(home, buildinfo.ConfigDirName), nil +} + +func (m *Manager) configPath() (string, error) { + configDir, err := m.configDir() + if err != nil { + return "", err + } + return filepath.Join(configDir, ConfigFileName), nil +} + +// Load reads the CLI config file if it exists. +func (m *Manager) Load() (*CLIConfig, error) { + cfg := &CLIConfig{} + tomlPath, err := m.configPath() + if err != nil { + return cfg, err + } + + if exists, err := m.exists(tomlPath); err != nil { + return cfg, err + } else if exists { + data, err := m.fs.ReadFile(tomlPath) + if err != nil { + return cfg, err + } + + if err := toml.Unmarshal(data, cfg); err != nil { + return cfg, err + } + } + + if cfg.Token == "" { + if token, err := m.secrets.Get(m.secretsKey); err == nil { + cfg.Token = token + } + } + + return cfg, nil +} + +// Write persists the CLI config file. +func (m *Manager) Write(cfg *CLIConfig) error { + cfgToWrite := *cfg + if cfg.Token != "" { + if err := m.secrets.Set(m.secretsKey, cfg.Token); err == nil { + cfgToWrite.Token = "" + } + } + + data, err := toml.Marshal(&cfgToWrite) + if err != nil { + return err + } + + configDir, err := m.configDir() + if err != nil { + return err + } + + if err := m.fs.MkdirAll(configDir, 0o700); err != nil { + return err + } + + path, err := m.configPath() + if err != nil { + return err + } + + return m.fs.WriteFile(path, data, 0o644) +} + +// Delete removes the CLI config file if it exists. +func (m *Manager) Delete() error { + if err := m.secrets.Delete(m.secretsKey); err != nil && !errors.Is(err, secrets.ErrNotFound) && !errors.Is(err, secrets.ErrUnavailable) { + return err + } + + path, err := m.configPath() + if err != nil { + return err + } + if err := m.fs.Remove(path); err != nil && !errors.Is(err, os.ErrNotExist) { + return err + } + + return nil +} + +func (m *Manager) exists(path string) (bool, error) { + _, err := m.fs.Stat(path) + if err == nil { + return true, nil + } + if errors.Is(err, os.ErrNotExist) { + return false, nil + } + return false, err +} + +// TokenRefreshCallback returns a callback that persists refreshed token +// credentials. Intended for use with deepsource.ClientOpts.OnTokenRefreshed. +func (m *Manager) TokenRefreshCallback() func(token, expiry, email string) { + return func(token, expiry, email string) { + cfg, err := m.Load() + if err != nil { + return + } + cfg.Token = token + cfg.SetTokenExpiry(expiry) + cfg.User = email + _ = m.Write(cfg) + } +} + +// GetConfig loads the config using OS-backed defaults. +func GetConfig() (*CLIConfig, error) { + return DefaultManager().Load() +} + +// WriteConfig writes the config using OS-backed defaults. +func WriteConfig(cfg *CLIConfig) error { + return DefaultManager().Write(cfg) +} + +// DeleteConfig removes the config using OS-backed defaults. 
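For context on how the new config.Manager is meant to be wired up outside of DefaultManager(), the sketch below constructs one against a throwaway home directory and the no-op secrets store, using only the constructors and methods added in this file. It is illustrative only: the temporary-directory setup and the example field values are not part of this change.

    // Illustrative sketch: exercises config.NewManagerWithSecrets with an
    // injected home directory so nothing under ~/.deepsource is touched.
    package main

    import (
        "fmt"
        "os"

        "github.com/deepsourcelabs/cli/config"
        "github.com/deepsourcelabs/cli/internal/adapters"
        "github.com/deepsourcelabs/cli/internal/secrets"
    )

    func main() {
        tmp, err := os.MkdirTemp("", "ds-config")
        if err != nil {
            panic(err)
        }
        homeDir := func() (string, error) { return tmp, nil }

        // An empty key falls back to buildinfo.KeychainKey inside the manager.
        mgr := config.NewManagerWithSecrets(adapters.NewOSFileSystem(), homeDir, secrets.NoopStore{}, "")

        cfg, err := mgr.Load() // no config file exists yet, so this yields an empty CLIConfig
        if err != nil {
            panic(err)
        }
        cfg.Host = "deepsource.com"
        cfg.Token = "example-token" // placeholder value, illustrative only
        if err := mgr.Write(cfg); err != nil {
            panic(err)
        }
        fmt.Println("wrote config under", tmp)
    }
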
+func DeleteConfig() error { + return DefaultManager().Delete() +} diff --git a/config/manager_test.go b/config/manager_test.go new file mode 100644 index 00000000..fcbfd54d --- /dev/null +++ b/config/manager_test.go @@ -0,0 +1,42 @@ +package config + +import ( + "testing" + + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/secrets" + "github.com/stretchr/testify/assert" +) + +type fakeSecretStore struct { + data map[string]string +} + +func (f *fakeSecretStore) Get(key string) (string, error) { + value, ok := f.data[key] + if !ok { + return "", secrets.ErrNotFound + } + return value, nil +} + +func (f *fakeSecretStore) Set(key string, value string) error { + f.data[key] = value + return nil +} + +func (f *fakeSecretStore) Delete(key string) error { + delete(f.data, key) + return nil +} + +func TestManagerLoadTokenFromSecrets(t *testing.T) { + tempDir := t.TempDir() + homeDir := func() (string, error) { return tempDir, nil } + store := &fakeSecretStore{data: map[string]string{"token-key": "secret-token"}} + + mgr := NewManagerWithSecrets(adapters.NewOSFileSystem(), homeDir, store, "token-key") + cfg, err := mgr.Load() + assert.NoError(t, err) + assert.Equal(t, "secret-token", cfg.Token) +} diff --git a/configvalidator/analyzer_config_validator.go b/configvalidator/analyzer_config_validator.go deleted file mode 100644 index 3d2c78e7..00000000 --- a/configvalidator/analyzer_config_validator.go +++ /dev/null @@ -1,101 +0,0 @@ -package configvalidator - -import ( - "encoding/json" - "fmt" - "reflect" - - "github.com/deepsourcelabs/cli/utils" - "github.com/xeipuuv/gojsonschema" -) - -// Analyzer Config Validator -func (c *ConfigValidator) validateAnalyzersConfig() { - activatedAnalyzers := make(map[string]interface{}) - - // Analyzer array should not be empty - if len(c.Config.Analyzers) == 0 { - c.pushError("There must be atleast one activated `analyzer` in the config. Found: 0") - } - - // Analyzers should be an array - analyzersType := reflect.TypeOf(c.Config.Analyzers).Kind().String() - if analyzersType != "slice" { - c.pushError(fmt.Sprintf("Value of `analyzers` should be an array. Found: %v", analyzersType)) - } - - // Count enabled analyzers (missing enabled field defaults to true) - countEnabled := 0 - for _, analyzer := range c.Config.Analyzers { - // If enabled is not set (nil), consider it as enabled (true) - // If enabled is set, use its value - isEnabled := analyzer.Enabled == nil || *analyzer.Enabled - - if isEnabled { - countEnabled++ - } - } - if countEnabled == 0 && len(c.Config.Analyzers) > 0 { - c.pushError("There must be atleast one enabled `analyzer`. 
Found: 0") - } - - // ==== Analyzer shortcode validation ==== - supported := false - for _, analyzer := range c.Config.Analyzers { - for _, supportedAnalyzer := range utils.AnalyzersData.AnalyzerShortcodes { - if analyzer.Name == supportedAnalyzer { - // Copy the meta of activated analyzer for usage in - // analyzer meta validation - isEnabled := analyzer.Enabled == nil || *analyzer.Enabled - if isEnabled { - activatedAnalyzers[analyzer.Name] = analyzer.Meta - } - supported = true - break - } - } - if !supported { - c.pushError(fmt.Sprintf("Analyzer for \"%s\" is not supported yet.", analyzer.Name)) - } - - supported = false - } - - // ==== Meta Schema Validation ==== - - // Contains the meta-schema of the particular activated analyzer - var analyzerMetaSchema string - // Contains the user supplied meta - var userActivatedSchema interface{} - - // Iterating over the activated analyzers and - // validating the meta_schema - for analyzer, meta := range activatedAnalyzers { - analyzerMetaSchema = utils.AnalyzersData.AnalyzersMetaMap[analyzer] - userActivatedSchema = meta - - // Loading the Meta Schema obtained from API - schema := gojsonschema.NewStringLoader(analyzerMetaSchema) - // Loading the Meta Schema of the user after converting it - // into a JSON string - jsonUserSchema, _ := json.Marshal(userActivatedSchema) - inputMeta := gojsonschema.NewStringLoader(string(jsonUserSchema)) - - // If there is no meta-schema, write empty object in the inputSchema - if string(jsonUserSchema) == "null" { - inputMeta = gojsonschema.NewStringLoader("{}") - } - - // Validate the Meta Schema - result, _ := gojsonschema.Validate(schema, inputMeta) - if result.Valid() { - continue - } - finalErrString := fmt.Sprintf("Errors found while validating meta of %s analyzer: ", analyzer) - for _, err := range result.Errors() { - errString := err.String() - finalErrString = finalErrString + errString - } - c.pushError(finalErrString) - } -} diff --git a/configvalidator/analyzer_config_validator_test.go b/configvalidator/analyzer_config_validator_test.go deleted file mode 100644 index b764938a..00000000 --- a/configvalidator/analyzer_config_validator_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package configvalidator - -import ( - "reflect" - "testing" -) - -func TestValidateAnalyzersConfig(t *testing.T) { - setDummyAnalyzerTransformerData() - type test struct { - inputConfig string - result bool - } - - tests := map[string]test{ - "valid config": { - inputConfig: ` - [[analyzers]] - name = "python" - enabled = true`, - result: true, - }, - "name should be a string": { - inputConfig: ` - [[analyzers]] - name = 123 - enabled = true`, - result: false, - }, - "`analyzers` should be an array": { - inputConfig: ` - analyzers = "python" - enabled = true`, - result: false, - }, - "atleast one analyzer should be enabled": { - inputConfig: ` - [[analyzers]] - name = "python" - enabled = false`, - result: false, - }, - "name cannot be of an unsupported analyzer": { - inputConfig: ` - [[analyzers]] - name = "foobar" - enabled = true`, - result: false, - }, - "analyzer with meta config": { - inputConfig: ` - [[analyzers]] - name = "python" - enabled = true - - [analyzers.meta] - max_line_length = 100 - skip_doc_coverage = ["module", "magic", "class"]`, - result: true, - }, - "max_line_length meta property validation": { - inputConfig: ` - [[analyzers]] - name = "python" - enabled = true - - [analyzers.meta] - max_line_length = -100`, - result: false, - }, - "valid multiple analyzers": { - inputConfig: ` - [[analyzers]] - name = 
"python" - enabled = true - - [analyzers.meta] - max_line_length = 100 - - [[analyzers]] - name = "test-coverage" - enabled = true`, - result: true, - }, - } - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - testConfig, err := getConfig([]byte(tc.inputConfig)) - if err != nil { - t.Error(err) - } - c := &ConfigValidator{ - Config: *testConfig, - Result: Result{ - Valid: true, - Errors: []string{}, - ConfigReadError: false, - }, - } - c.validateAnalyzersConfig() - if !reflect.DeepEqual(tc.result, c.Result.Valid) { - t.Errorf("expected: %v, got: %v. Error: %v", tc.result, c.Result.Valid, c.Result.Errors) - } - }) - } -} diff --git a/configvalidator/config_validator.go b/configvalidator/config_validator.go deleted file mode 100644 index 75cac33e..00000000 --- a/configvalidator/config_validator.go +++ /dev/null @@ -1,78 +0,0 @@ -package configvalidator - -import ( - "bytes" - "fmt" - "strings" - - "github.com/spf13/viper" -) - -const ( - MAX_ALLOWED_VERSION = 1 -) - -type Result struct { - Valid bool - Errors []string - ConfigReadError bool -} - -// Struct to store the meta (Config) and output (Result) of config validation -type ConfigValidator struct { - Config DSConfig - Result Result -} - -// Entrypoint to the package `configvalidator` -// Accepts DeepSource config as a parameter and validates it -func (c *ConfigValidator) ValidateConfig(inputConfig []byte) Result { - // Base cases - c.Result.Valid = true - c.Result.ConfigReadError = false - - // Making a "config" struct based on DSConfig to store the DeepSource config - config := DSConfig{} - viper.SetConfigType("toml") - err := viper.ReadConfig(bytes.NewBuffer(inputConfig)) - if err != nil { - // Error while reading config - c.Result.Valid = false - c.Result.Errors = append(c.Result.Errors, err.Error()) - c.Result.ConfigReadError = true - return c.Result - } - // Unmarshaling the configdata into DSConfig struct - err = viper.UnmarshalExact(&config) - if err != nil { - // Check if the error is due to invalid enabled field types - // match `` * cannot parse 'analyzers[0].enabled' as bool: strconv.ParseBool: parsing "falsee": invalid syntax` - if strings.Contains(err.Error(), "strconv.ParseBool") { - c.Result.Valid = false - c.Result.Errors = append(c.Result.Errors, "The `enabled` property should be of boolean type (true/false)") - return c.Result - } - // Other unmarshaling errors - c.Result.Valid = false - c.Result.Errors = append(c.Result.Errors, fmt.Sprintf("Error while parsing config: %v", err)) - return c.Result - } - c.Config = config - - // Validate generic config which applies to all analyzers and transformers - // Includes : Version, Exclude Patterns, Test Patterns - c.validateGenericConfig() - - // Validate the Analyzers configuration - c.validateAnalyzersConfig() - - // Validate the Transformers configuration - c.validateTransformersConfig() - return c.Result -} - -// Utility function to push result string into the "ConfigValidator" struct -func (c *ConfigValidator) pushError(errorString string) { - c.Result.Errors = append(c.Result.Errors, errorString) - c.Result.Valid = false -} diff --git a/configvalidator/config_validator_test.go b/configvalidator/config_validator_test.go deleted file mode 100644 index 8c3c8564..00000000 --- a/configvalidator/config_validator_test.go +++ /dev/null @@ -1,203 +0,0 @@ -package configvalidator - -import ( - "testing" - - "github.com/deepsourcelabs/cli/utils" -) - -func TestValidateConfig(t *testing.T) { - type test struct { - inputConfig string - valid bool - } - 
setDummyAnalyzerTransformerData() - - tests := map[string]test{ - "blank config": { - inputConfig: "", - valid: false, - }, - "analyzer should be array": { - inputConfig: ` - version = 1 - analyzers = "python", - enabled = true`, - valid: false, - }, - "zero analyzers": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = false - - [[analyzers]] - name = "javascript" - enabled = true`, - valid: false, - }, - "transformer without analyzer": { - inputConfig: ` - version = 1 - - [[transformers]] - name = "black" - enabled = true`, - valid: false, - }, - "no analyzer/transformer activated": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = false - - [[transformers]] - name = "black" - enabled = false - - [[transformers]] - name = "isort" - enabled = false`, - valid: false, - }, - "tranformers with analyzer disabled": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = false - - [[transformers]] - name = "black" - enabled = true - - [[transformers]] - name = "isort" - enabled = true`, - valid: false, - }, - "non-supported transformer": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = true - - [[transformers]] - name = "egg" - enabled = true`, - valid: false, - }, - "transformer must be an array": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = true - - transformers = "egg" - enabled = true`, - valid: false, - }, - "valid config with enabled not set (defaults to true)": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - - [[transformers]] - name = "black"`, - valid: true, - }, - - "invalid config with enabled = \"falsee\" (non-boolean)": { - inputConfig: ` - version = 1 - - [[analyzers]] - name = "python" - enabled = "falsee"`, - valid: false, - }, - "config with syntax error": { - inputConfig: ` - version = 1 - - [[analyzers] - name = "python" - enabled = false`, - valid: false, - }, - } - - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - c := &ConfigValidator{} - c.ValidateConfig([]byte(tc.inputConfig)) - if tc.valid != c.Result.Valid { - t.Errorf("%s: expected: %v, got: %v. 
Error: %v", testName, tc.valid, c.Result.Valid, c.Result.Errors) - } - }) - } -} - -func setDummyAnalyzerTransformerData() { - analyzersMetaMap := make(map[string]string) - utils.AnalyzersData.AnalyzerShortcodes = []string{"python", "test-coverage"} - utils.AnalyzersData.AnalyzersMeta = []string{`{ - "type": "object", - "properties": { - "max_line_length": { - "type": "integer", - "minimum": 79, - "title": "Maximum line length", - "description": "Customize this according to your project's conventions.", - "default": 100 - }, - "runtime_version": { - "enum": [ - "3.x.x", - "2.x.x" - ], - "type": "string", - "title": "Runtime version", - "description": "Set it to the least version of Python that your code runs on.", - "default": "3.x.x" - }, - "skip_doc_coverage": { - "type": "array", - "title": "Skip in doc coverage", - "description": "Types of objects that should be skipped while calculating documentation coverage.", - "items": { - "enum": [ - "magic", - "init", - "class", - "module", - "nonpublic" - ], - "type": "string" - }, - "additionalProperties": false - } - }, - "optional_required": [ - "runtime_version" - ], - "additionalProperties": false -}`, "{}"} - - analyzersMetaMap["python"] = utils.AnalyzersData.AnalyzersMeta[0] - analyzersMetaMap["test-coverage"] = utils.AnalyzersData.AnalyzersMeta[1] - utils.AnalyzersData.AnalyzersMetaMap = analyzersMetaMap - - utils.TransformersData.TransformerShortcodes = []string{"black", "prettier"} -} diff --git a/configvalidator/generic_config_validator.go b/configvalidator/generic_config_validator.go deleted file mode 100644 index 0d1b60d4..00000000 --- a/configvalidator/generic_config_validator.go +++ /dev/null @@ -1,93 +0,0 @@ -package configvalidator - -import ( - "fmt" - "reflect" - "strconv" - - "github.com/spf13/viper" -) - -// Generic Config : -// - Version -// - Exclude_Patterns -// - Test_Patterns - -// Validates version field of the DeepSource config -func (c *ConfigValidator) validateVersion() { - if viper.Get("version") != nil { - // Value of version must be an integer - if reflect.TypeOf(viper.Get("version")).Kind().String() != "int64" { - c.pushError(fmt.Sprintf("Value of `version` must be an integer. Got %s", reflect.TypeOf(viper.Get("version")).Kind().String())) - return - } - - // Should not be zero - versionInt, _ := strconv.Atoi(viper.GetString("version")) - if versionInt < 1 { - c.pushError(fmt.Sprintf("Value for `version` cannot be less than 1. Got %d", versionInt)) - } - - // Must be less than MAX_ALLOWED VERSION - if versionInt > MAX_ALLOWED_VERSION { - c.pushError(fmt.Sprintf("Value for `version` cannot be greater than %d. Got %d", MAX_ALLOWED_VERSION, versionInt)) - } - return - } - // if version is nil(not present in config) - c.pushError("Property `version` is mandatory.") -} - -// Validates `exclude_patterns` field of the DeepSource config -func (c *ConfigValidator) validateExcludePatterns() { - excludePatterns := viper.Get("exclude_patterns") - - // Sometimes the user doesn't add `exclude_patterns` to the code - // Validate only if excludePatterns present - if excludePatterns != nil { - // Must be a slice of string - exPatternType := reflect.TypeOf(excludePatterns).Kind().String() - if exPatternType != "slice" { - c.pushError(fmt.Sprintf("Value of `exclude_patterns` should be an array of strings. 
Found: %v", exPatternType)) - return - } - - // Value of each exclude pattern can only be a string - for _, ex_pattern := range c.Config.ExcludePatterns { - numValue, err := strconv.Atoi(ex_pattern) - if err == nil { - c.pushError(fmt.Sprintf("Value of `exclude_patterns` paths can only be string. Found: %v", numValue)) - } - } - } -} - -// Validates `test_patterns` field of the DeepSource config -func (c *ConfigValidator) validateTestPatterns() { - testPatterns := viper.Get("test_patterns") - - // Sometimes the user doesn't add `test_patterns` to the code - // Validate only if testPatterns present - if testPatterns != nil { - // Must be a slice - testPatternType := reflect.TypeOf(testPatterns).Kind().String() - if testPatternType != "slice" { - c.pushError(fmt.Sprintf("Value of `test_patterns` should be an array of objects. Found: %v", testPatternType)) - } - - // Value of each test pattern can only be a string - for _, test_pattern := range c.Config.TestPatterns { - numValue, err := strconv.Atoi(test_pattern) - if err == nil { - c.pushError(fmt.Sprintf("Value of `test_patterns` paths can only be string. Found: %v", numValue)) - } - } - } -} - -// Validates generic DeepSource config -func (c *ConfigValidator) validateGenericConfig() { - c.validateVersion() - c.validateExcludePatterns() - c.validateTestPatterns() -} diff --git a/configvalidator/generic_config_validator_test.go b/configvalidator/generic_config_validator_test.go deleted file mode 100644 index 13e5f874..00000000 --- a/configvalidator/generic_config_validator_test.go +++ /dev/null @@ -1,181 +0,0 @@ -package configvalidator - -import ( - "bytes" - "reflect" - "testing" - - "github.com/spf13/viper" -) - -func TestValidateVersion(t *testing.T) { - type test struct { - inputConfig string - valid bool - } - - tests := map[string]test{ - "valid config": { - inputConfig: "version = 1", - valid: true, - }, - "wrong version": { - inputConfig: "version = \"foobar\"", - valid: false, - }, - "version greater than maximum allowed": { - inputConfig: "version = 352", - valid: false, - }, - "version missing": { - inputConfig: "", - valid: false, - }, - "version of wrong type": { - inputConfig: "version = \"2\"", - valid: false, - }, - } - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - testConfig, err := getConfig([]byte(tc.inputConfig)) - if err != nil { - t.Error(err) - } - c := &ConfigValidator{ - Config: *testConfig, - Result: Result{ - Valid: true, - Errors: []string{}, - ConfigReadError: false, - }, - } - c.validateVersion() - if !reflect.DeepEqual(tc.valid, c.Result.Valid) { - t.Fatalf("%v: expected: %v, got: %v. 
Error: %v", testName, tc.valid, c.Result.Valid, c.Result.Errors) - } - }) - } -} - -func TestValidateExcludePatterns(t *testing.T) { - type test struct { - inputConfig string - valid bool - } - - tests := map[string]test{ - "valid exclude_patterns": { - inputConfig: "version= 1\nexclude_patterns = 23", - valid: false, - }, - "should be array of string": { - inputConfig: "version= 1\nexclude_patterns = [23,43]", - valid: false, - }, - "valid array of string": { - inputConfig: "version = 1\nexclude_patterns = ['hello', 'world']", - valid: true, - }, - "strings with double quotes": { - inputConfig: "exclude_patterns = [\"hello\",\"world\"]", - valid: true, - }, - "empty exclude_patterns": { - inputConfig: "exclude_patterns = []", - valid: true, - }, - "cannot be only string, should be an array": { - inputConfig: "version = 1\nexclude_patterns = 'hello'", - valid: false, - }, - } - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - testConfig, err := getConfig([]byte(tc.inputConfig)) - if err != nil { - t.Error(err) - } - c := &ConfigValidator{ - Config: *testConfig, - Result: Result{ - Valid: true, - Errors: []string{}, - ConfigReadError: false, - }, - } - c.validateExcludePatterns() - if !reflect.DeepEqual(tc.valid, c.Result.Valid) { - t.Fatalf("%v: Config : %v, expected: %v, got: %v. Error: %v", testName, tc.inputConfig, tc.valid, c.Result.Valid, c.Result.Errors) - } - }) - } -} - -func TestValidateTestPatterns(t *testing.T) { - type test struct { - inputConfig string - valid bool - } - - tests := map[string]test{ - "cannot be an integer": { - inputConfig: "test_patterns = 23", - valid: false, - }, - "cannot be an array of integers": { - inputConfig: "test_patterns = [23,43]", - valid: false, - }, - "should be array of strings": { - inputConfig: "test_patterns = ['hello', 'world']", - valid: true, - }, - "strings with double quotes": { - inputConfig: "test_patterns = [\"hello\",\"world\"]", - valid: true, - }, - "empty test_patterns": { - inputConfig: "test_patterns = []", - valid: true, - }, - "cannot be only string, should be an array of string": { - inputConfig: "test_patterns = 'hello'", - valid: false, - }, - } - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - testConfig, err := getConfig([]byte(tc.inputConfig)) - if err != nil { - t.Error(err) - } - c := &ConfigValidator{ - Config: *testConfig, - Result: Result{ - Valid: true, - Errors: []string{}, - ConfigReadError: false, - }, - } - c.validateTestPatterns() - if !reflect.DeepEqual(tc.valid, c.Result.Valid) { - t.Fatalf("%v: Config : %v, expected: %v, got: %v. 
Error: %v", testName, tc.inputConfig, tc.valid, c.Result.Valid, c.Result.Errors) - } - }) - } -} - -// Receives a string of DeepSource config and returns its -// representation in the form of a DSConfig struct -func getConfig(inputConfig []byte) (*DSConfig, error) { - config := DSConfig{} - viper.SetConfigType("toml") - err := viper.ReadConfig(bytes.NewBuffer(inputConfig)) - if err != nil { - return nil, err - } - // Unmarshaling the configdata into DSConfig struct - viper.UnmarshalExact(&config) - return &config, nil -} diff --git a/configvalidator/transformer_config_validator.go b/configvalidator/transformer_config_validator.go deleted file mode 100644 index 0bd103d6..00000000 --- a/configvalidator/transformer_config_validator.go +++ /dev/null @@ -1,40 +0,0 @@ -package configvalidator - -import ( - "fmt" - "reflect" - - "github.com/deepsourcelabs/cli/utils" - "github.com/spf13/viper" -) - -// Validates Transformers Config -func (c *ConfigValidator) validateTransformersConfig() { - // If no transformer activated by user, return without any errors - if viper.Get("transformers") == nil { - return - } - - // Transformers should be an array - transformersType := reflect.TypeOf(c.Config.Transformers).Kind().String() - if transformersType != "slice" { - c.pushError(fmt.Sprintf("Value of `transformers` should be an array. Found: %v", transformersType)) - } - - // Enabled property validation is handled in the main config validator - // (transformers with invalid enabled types will cause unmarshaling errors) - - // ==== Transformer shortcode validation ==== - supported := false - for _, activatedTransformer := range c.Config.Transformers { - for _, supportedTransformer := range utils.TransformersData.TransformerShortcodes { - if activatedTransformer.Name == supportedTransformer { - supported = true - break - } - } - if !supported { - c.pushError(fmt.Sprintf("The Tranformer %s is not supported yet.", activatedTransformer.Name)) - } - } -} diff --git a/configvalidator/transformer_config_validator_test.go b/configvalidator/transformer_config_validator_test.go deleted file mode 100644 index 50514503..00000000 --- a/configvalidator/transformer_config_validator_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package configvalidator - -import ( - "reflect" - "testing" -) - -func TestValidateTransformersConfig(t *testing.T) { - setDummyAnalyzerTransformerData() - type test struct { - inputConfig string - result bool - } - - tests := map[string]test{ - "valid config": { - inputConfig: ` - [[transformers]] - name = "black" - enabled = true`, - result: true, - }, - "transformers are not mandatory lik}e analyzers": { - inputConfig: ` - [[transformers]] - name = "black" - enabled = false`, - result: true, - }, - "can't use unsupported analyzer": { - inputConfig: ` - [[transformers]] - name = "rick-astley" - enabled = true`, - result: false, - }, - "multiple transformers": { - inputConfig: ` - [[transformers]] - name = "black" - enabled = true - - [[transformers]] - name = "prettier" - enabled = true`, - result: true, - }, - } - for testName, tc := range tests { - t.Run(testName, func(t *testing.T) { - testConfig, err := getConfig([]byte(tc.inputConfig)) - if err != nil { - t.Error(err) - } - c := &ConfigValidator{ - Config: *testConfig, - Result: Result{ - Valid: true, - Errors: []string{}, - ConfigReadError: false, - }, - } - c.validateTransformersConfig() - if !reflect.DeepEqual(tc.result, c.Result.Valid) { - t.Errorf("expected: %v, got: %v. 
Error: %v", tc.result, c.Result.Valid, c.Result.Errors) - } - }) - } -} diff --git a/configvalidator/types.go b/configvalidator/types.go deleted file mode 100644 index 9c5cad4a..00000000 --- a/configvalidator/types.go +++ /dev/null @@ -1,24 +0,0 @@ -package configvalidator - -// DSConfig is the struct for .deepsource.toml file -type Analyzer struct { - Name string `mapstructure:"name,omitempty" json:"name,omitempty"` - RuntimeVersion string `mapstructure:"runtime_version,omitempty" json:"runtime_version,omitempty"` - Enabled *bool `mapstructure:"enabled,omitempty" json:"enabled,omitempty"` - DependencyFilePaths []string `mapstructure:"dependency_file_paths,omitempty" json:"dependency_file_paths,omitempty"` - Meta interface{} `mapstructure:"meta,omitempty" json:"meta,omitempty"` - Thresholds interface{} `mapstructure:"thresholds,omitempty" json:"thresholds,omitempty"` -} - -type Transformer struct { - Name string `mapstructure:"name,omitempty" json:"name,omitempty"` - Enabled *bool `mapstructure:"enabled,omitempty" json:"enabled,omitempty"` -} - -type DSConfig struct { - Version int `mapstructure:"version,omitempty" json:"version"` - ExcludePatterns []string `mapstructure:"exclude_patterns,omitempty" json:"exclude_patterns,omitempty"` - TestPatterns []string `mapstructure:"test_patterns,omitempty" json:"test_patterns,omitempty"` - Analyzers []Analyzer `mapstructure:"analyzers,omitempty" json:"analyzers,omitempty"` - Transformers []Transformer `mapstructure:"transformers,omitempty" json:"transformers,omitempty"` -} diff --git a/deepsource/analyzers/queries/get_analyzers.go b/deepsource/analyzers/queries/get_analyzers.go index 04e496d2..ddcd2cd2 100644 --- a/deepsource/analyzers/queries/get_analyzers.go +++ b/deepsource/analyzers/queries/get_analyzers.go @@ -4,8 +4,8 @@ import ( "context" "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/analyzers" - "github.com/deepsourcelabs/graphql" ) // GraphQL query @@ -22,7 +22,9 @@ const listAnalyzersQuery = ` } }` -type AnalyzersRequest struct{} +type AnalyzersRequest struct { + client graphqlclient.GraphQLClient +} type AnalyzersResponse struct { Analyzers struct { @@ -36,24 +38,14 @@ type AnalyzersResponse struct { } `json:"analyzers"` } -// GraphQL client interface -type IGQLClient interface { - GQL() *graphql.Client - GetToken() string +func NewAnalyzersRequest(client graphqlclient.GraphQLClient) *AnalyzersRequest { + return &AnalyzersRequest{client: client} } -func (a AnalyzersRequest) Do(ctx context.Context, client IGQLClient) ([]analyzers.Analyzer, error) { - req := graphql.NewRequest(listAnalyzersQuery) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - tokenHeader := fmt.Sprintf("Bearer %s", client.GetToken()) - req.Header.Add("Authorization", tokenHeader) - - // run it and capture the response +func (a *AnalyzersRequest) Do(ctx context.Context) ([]analyzers.Analyzer, error) { var respData AnalyzersResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := a.client.Query(ctx, listAnalyzersQuery, nil, &respData); err != nil { + return nil, fmt.Errorf("Fetch analyzers: %w", err) } // Formatting the query response w.r.t the output format diff --git a/deepsource/auth/mutations/refresh_pat.go b/deepsource/auth/mutations/refresh_pat.go index 963cac46..44012447 100644 --- a/deepsource/auth/mutations/refresh_pat.go +++ b/deepsource/auth/mutations/refresh_pat.go @@ -4,8 +4,8 @@ import ( "context" "fmt" + 
"github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/auth" - "github.com/deepsourcelabs/graphql" ) // GraphQL query to refresh token @@ -25,6 +25,7 @@ type RefreshTokenParams struct { } type RefreshTokenRequest struct { + client graphqlclient.GraphQLClient Params RefreshTokenParams } @@ -32,17 +33,15 @@ type RefreshTokenResponse struct { auth.PAT `json:"refreshPat"` } -func (r RefreshTokenRequest) Do(ctx context.Context, client IGQLClient) (*auth.PAT, error) { - req := graphql.NewRequest(refreshTokenQuery) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", r.Params.Token)) +func NewRefreshTokenRequest(client graphqlclient.GraphQLClient, params RefreshTokenParams) *RefreshTokenRequest { + return &RefreshTokenRequest{client: client, Params: params} +} - // run it and capture the response +func (r *RefreshTokenRequest) Do(ctx context.Context) (*auth.PAT, error) { + r.client.SetAuthToken(r.Params.Token) var respData RefreshTokenResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := r.client.Mutate(ctx, refreshTokenQuery, nil, &respData); err != nil { + return nil, fmt.Errorf("Refresh PAT: %w", err) } return &respData.PAT, nil diff --git a/deepsource/auth/mutations/register_device.go b/deepsource/auth/mutations/register_device.go index 57292521..7f39918f 100644 --- a/deepsource/auth/mutations/register_device.go +++ b/deepsource/auth/mutations/register_device.go @@ -2,9 +2,10 @@ package auth import ( "context" + "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/auth" - "github.com/deepsourcelabs/graphql" ) // GraphQL mutation to register Device get a device code @@ -19,23 +20,22 @@ const registerDeviceMutation = `mutation register { } }` -type RegisterDeviceRequest struct{} +type RegisterDeviceRequest struct { + client graphqlclient.GraphQLClient +} type RegisterDeviceResponse struct { auth.Device `json:"registerDevice"` } -type IGQLClient interface { - GQL() *graphql.Client +func NewRegisterDeviceRequest(client graphqlclient.GraphQLClient) *RegisterDeviceRequest { + return &RegisterDeviceRequest{client: client} } -func (r RegisterDeviceRequest) Do(ctx context.Context, client IGQLClient) (*auth.Device, error) { - req := graphql.NewRequest(registerDeviceMutation) - req.Header.Set("Cache-Control", "no-cache") - +func (r *RegisterDeviceRequest) Do(ctx context.Context) (*auth.Device, error) { var res RegisterDeviceResponse - if err := client.GQL().Run(ctx, req, &res); err != nil { - return nil, err + if err := r.client.Mutate(ctx, registerDeviceMutation, nil, &res); err != nil { + return nil, fmt.Errorf("Register device: %w", err) } return &res.Device, nil diff --git a/deepsource/auth/mutations/request_pat.go b/deepsource/auth/mutations/request_pat.go index faf0a31b..5dc7ca69 100644 --- a/deepsource/auth/mutations/request_pat.go +++ b/deepsource/auth/mutations/request_pat.go @@ -2,9 +2,10 @@ package auth import ( "context" + "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/auth" - "github.com/deepsourcelabs/graphql" ) type RequestPATParams struct { @@ -13,6 +14,7 @@ type RequestPATParams struct { } type RequestPATRequest struct { + client graphqlclient.GraphQLClient Params RequestPATParams } @@ -32,14 +34,15 @@ type RequestPATResponse struct { auth.PAT `json:"requestPatWithDeviceCode"` } -func (r RequestPATRequest) 
Do(ctx context.Context, client IGQLClient) (*auth.PAT, error) { - req := graphql.NewRequest(requestPATMutation) - req.Header.Set("Cache-Control", "no-cache") - req.Var("input", r.Params) +func NewRequestPATRequest(client graphqlclient.GraphQLClient, params RequestPATParams) *RequestPATRequest { + return &RequestPATRequest{client: client, Params: params} +} +func (r *RequestPATRequest) Do(ctx context.Context) (*auth.PAT, error) { var res RequestPATResponse - if err := client.GQL().Run(ctx, req, &res); err != nil { - return nil, err + vars := map[string]interface{}{"input": r.Params} + if err := r.client.Mutate(ctx, requestPATMutation, vars, &res); err != nil { + return nil, fmt.Errorf("Request PAT: %w", err) } return &res.PAT, nil diff --git a/deepsource/client.go b/deepsource/client.go index 75264ad4..9e324dc3 100644 --- a/deepsource/client.go +++ b/deepsource/client.go @@ -9,25 +9,40 @@ import ( analyzerQuery "github.com/deepsourcelabs/cli/deepsource/analyzers/queries" "github.com/deepsourcelabs/cli/deepsource/auth" authmut "github.com/deepsourcelabs/cli/deepsource/auth/mutations" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/issues" issuesQuery "github.com/deepsourcelabs/cli/deepsource/issues/queries" + "github.com/deepsourcelabs/cli/deepsource/metrics" + metricsQuery "github.com/deepsourcelabs/cli/deepsource/metrics/queries" + "github.com/deepsourcelabs/cli/deepsource/vulnerabilities" + vulnerabilitiesQuery "github.com/deepsourcelabs/cli/deepsource/vulnerabilities/queries" "github.com/deepsourcelabs/cli/deepsource/repository" repoQuery "github.com/deepsourcelabs/cli/deepsource/repository/queries" + "github.com/deepsourcelabs/cli/deepsource/runs" + runsQuery "github.com/deepsourcelabs/cli/deepsource/runs/queries" "github.com/deepsourcelabs/cli/deepsource/transformers" transformerQuery "github.com/deepsourcelabs/cli/deepsource/transformers/queries" + "github.com/deepsourcelabs/cli/deepsource/user" + userQuery "github.com/deepsourcelabs/cli/deepsource/user/queries" "github.com/deepsourcelabs/graphql" ) -var defaultHostName = "deepsource.io" +var defaultHostName = "deepsource.com" type ClientOpts struct { Token string HostName string + + // OnTokenRefreshed is called after a successful automatic token refresh. + // If set, enables transparent token refresh when API calls fail due to + // expired tokens. The callback should persist the new credentials. + OnTokenRefreshed func(token, expiry, email string) } type Client struct { - gql *graphql.Client - token string + gql *graphql.Client + gqlWrapper graphqlclient.GraphQLClient + token string } // Returns a GraphQL client which can be used to interact with the GQL APIs @@ -40,14 +55,36 @@ func (c Client) GetToken() string { return c.token } +// NewWithGraphQLClient creates a Client that uses the given GraphQL client +// directly. Intended for tests where a MockClient provides canned responses. 
+func NewWithGraphQLClient(gql graphqlclient.GraphQLClient) *Client { + return &Client{gqlWrapper: gql} +} + // Returns a new GQLClient func New(cp ClientOpts) (*Client, error) { apiClientURL := getAPIClientURL(cp.HostName) gql := graphql.NewClient(apiClientURL) - return &Client{ + + c := &Client{ gql: gql, token: cp.Token, - }, nil + } + + if cp.OnTokenRefreshed != nil { + c.gqlWrapper = graphqlclient.NewWithClientAndRefresher(gql, cp.Token, func(ctx context.Context, currentToken string) (string, error) { + pat, err := c.RefreshAuthCreds(ctx, currentToken) + if err != nil { + return "", err + } + cp.OnTokenRefreshed(pat.Token, pat.Expiry, pat.User.Email) + return pat.Token, nil + }) + } else { + c.gqlWrapper = graphqlclient.NewWithClient(gql, cp.Token) + } + + return c, nil } // // Formats and returns the DeepSource Public API client URL @@ -64,8 +101,8 @@ func getAPIClientURL(hostName string) string { // Registers the device and allots it a device code which is further used for fetching // the PAT and other authentication data func (c Client) RegisterDevice(ctx context.Context) (*auth.Device, error) { - req := authmut.RegisterDeviceRequest{} - res, err := req.Do(ctx, c) + req := authmut.NewRegisterDeviceRequest(c.gqlWrapper) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -74,14 +111,12 @@ func (c Client) RegisterDevice(ctx context.Context) (*auth.Device, error) { // Logs in the client using the deviceCode and the user Code and returns the PAT and data which is required for authentication func (c Client) Login(ctx context.Context, deviceCode, description string) (*auth.PAT, error) { - req := authmut.RequestPATRequest{ - Params: authmut.RequestPATParams{ - DeviceCode: deviceCode, - Description: description, - }, - } + req := authmut.NewRequestPATRequest(c.gqlWrapper, authmut.RequestPATParams{ + DeviceCode: deviceCode, + Description: description, + }) - res, err := req.Do(ctx, c) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -90,12 +125,10 @@ func (c Client) Login(ctx context.Context, deviceCode, description string) (*aut // Refreshes the authentication credentials. Takes the refreshToken as a parameter. func (c Client) RefreshAuthCreds(ctx context.Context, token string) (*auth.PAT, error) { - req := authmut.RefreshTokenRequest{ - Params: authmut.RefreshTokenParams{ - Token: token, - }, - } - res, err := req.Do(ctx, c) + req := authmut.NewRefreshTokenRequest(c.gqlWrapper, authmut.RefreshTokenParams{ + Token: token, + }) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -104,8 +137,8 @@ func (c Client) RefreshAuthCreds(ctx context.Context, token string) (*auth.PAT, // Returns the list of Analyzers supported by DeepSource along with their meta like shortcode, metaschema. func (c Client) GetSupportedAnalyzers(ctx context.Context) ([]analyzers.Analyzer, error) { - req := analyzerQuery.AnalyzersRequest{} - res, err := req.Do(ctx, c) + req := analyzerQuery.NewAnalyzersRequest(c.gqlWrapper) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -114,8 +147,8 @@ func (c Client) GetSupportedAnalyzers(ctx context.Context) ([]analyzers.Analyzer // Returns the list of Transformers supported by DeepSource along with their meta like shortcode. 
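Putting the pieces of this refactor together: a caller now constructs the client once from ClientOpts, and token handling plus the optional refresh-and-persist behaviour live behind the graphqlclient wrapper. A minimal sketch, assuming the config and deepsource packages behave as defined elsewhere in this diff; error handling is abbreviated.

    package main

    import (
        "context"
        "fmt"

        "github.com/deepsourcelabs/cli/config"
        "github.com/deepsourcelabs/cli/deepsource"
    )

    func main() {
        cfgMgr := config.DefaultManager()
        cfg, err := cfgMgr.Load()
        if err != nil {
            panic(err)
        }

        // Wiring TokenRefreshCallback means credentials refreshed by the
        // client are persisted back through the config manager.
        client, err := deepsource.New(deepsource.ClientOpts{
            Token:            cfg.Token,
            HostName:         cfg.Host,
            OnTokenRefreshed: cfgMgr.TokenRefreshCallback(),
        })
        if err != nil {
            panic(err)
        }

        supported, err := client.GetSupportedAnalyzers(context.Background())
        if err != nil {
            panic(err)
        }
        fmt.Printf("%d analyzers supported\n", len(supported))
    }
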
func (c Client) GetSupportedTransformers(ctx context.Context) ([]transformers.Transformer, error) { - req := transformerQuery.TransformersRequest{} - res, err := req.Do(ctx, c) + req := transformerQuery.NewTransformersRequest(c.gqlWrapper) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -127,15 +160,12 @@ func (c Client) GetSupportedTransformers(ctx context.Context) ([]transformers.Tr // repoName : The name of the repository whose activation status has to be queried // provider : The VCS provider which hosts the repo (GITHUB/GITLAB/BITBUCKET) func (c Client) GetRepoStatus(ctx context.Context, owner, repoName, provider string) (*repository.Meta, error) { - req := repoQuery.RepoStatusRequest{ - Params: repoQuery.RepoStatusParams{ - Owner: owner, - RepoName: repoName, - Provider: provider, - }, - } - - res, err := req.Do(ctx, c) + req := repoQuery.NewRepoStatusRequest(c.gqlWrapper, repoQuery.RepoStatusParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + }) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -148,15 +178,13 @@ func (c Client) GetRepoStatus(ctx context.Context, owner, repoName, provider str // provider : The VCS provider which hosts the repo (GITHUB/GITLAB/BITBUCKET) // limit : The amount of issues to be listed. The default limit is 30 while the maximum limit is currently 100. func (c Client) GetIssues(ctx context.Context, owner, repoName, provider string, limit int) ([]issues.Issue, error) { - req := issuesQuery.IssuesListRequest{ - Params: issuesQuery.IssuesListParams{ - Owner: owner, - RepoName: repoName, - Provider: provider, - Limit: limit, - }, - } - res, err := req.Do(ctx, c) + req := issuesQuery.NewIssuesListRequest(c.gqlWrapper, issuesQuery.IssuesListParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + Limit: limit, + }) + res, err := req.Do(ctx) if err != nil { return nil, err } @@ -171,17 +199,184 @@ func (c Client) GetIssues(ctx context.Context, owner, repoName, provider string, // filePath : The relative path of the file. Eg: "tests/mock.py" if a file `mock.py` is present in `tests` directory which in turn is present in the root dir // limit : The amount of issues to be listed. The default limit is 30 while the maximum limit is currently 100. func (c Client) GetIssuesForFile(ctx context.Context, owner, repoName, provider, filePath string, limit int) ([]issues.Issue, error) { - req := issuesQuery.FileIssuesListRequest{ - Params: issuesQuery.FileIssuesListParams{ - Owner: owner, - RepoName: repoName, - Provider: provider, - FilePath: filePath, - Limit: limit, - }, + req := issuesQuery.NewFileIssuesListRequest(c.gqlWrapper, issuesQuery.FileIssuesListParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + FilePath: filePath, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns details of the authenticated user. +func (c Client) GetViewer(ctx context.Context) (*user.User, error) { + req := userQuery.NewViewerRequest(c.gqlWrapper) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns the list of analysis runs for a repository. 
+// owner : The username of the owner of the repository +// repoName : The name of the repository +// provider : The VCS provider which hosts the repo (GITHUB/GITLAB/BITBUCKET) +// limit : The number of analysis runs to fetch +func (c Client) GetAnalysisRuns(ctx context.Context, owner, repoName, provider string, limit int) ([]runs.AnalysisRun, error) { + req := runsQuery.NewAnalysisRunsListRequest(c.gqlWrapper, runsQuery.AnalysisRunsListParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns the issues for a specific analysis run. +// commitOid : The commit OID of the analysis run +func (c Client) GetRunIssues(ctx context.Context, commitOid string) (*runs.RunWithIssues, error) { + req := runsQuery.NewRunIssuesRequest(c.gqlWrapper, runsQuery.RunIssuesParams{ + CommitOid: commitOid, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns issues for a specific run as a flat list (for issues --commit). +func (c Client) GetRunIssuesFlat(ctx context.Context, commitOid string, limit int) ([]issues.Issue, error) { + req := issuesQuery.NewRunIssuesFlatRequest(c.gqlWrapper, issuesQuery.RunIssuesFlatParams{ + CommitOid: commitOid, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns issues for a specific pull request. +func (c Client) GetPRIssues(ctx context.Context, owner, repoName, provider string, prNumber, limit int) ([]issues.Issue, error) { + req := issuesQuery.NewPRIssuesListRequest(c.gqlWrapper, issuesQuery.PRIssuesListParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + PRNumber: prNumber, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns metrics for a repository's default branch. +func (c Client) GetRepoMetrics(ctx context.Context, owner, repoName, provider string) ([]metrics.RepositoryMetric, error) { + req := metricsQuery.NewRepoMetricsRequest(c.gqlWrapper, metricsQuery.RepoMetricsParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns metrics for a specific analysis run. +func (c Client) GetRunMetrics(ctx context.Context, commitOid string) (*metrics.RunMetrics, error) { + req := metricsQuery.NewRunMetricsRequest(c.gqlWrapper, metricsQuery.RunMetricsParams{ + CommitOid: commitOid, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns metrics for a specific pull request. +func (c Client) GetPRMetrics(ctx context.Context, owner, repoName, provider string, prNumber int) (*metrics.PRMetrics, error) { + req := metricsQuery.NewPRMetricsRequest(c.gqlWrapper, metricsQuery.PRMetricsParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + PRNumber: prNumber, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err } + return res, nil +} + +// Returns vulnerabilities for a repository's default branch. 
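+// owner : The username of the owner of the repository
+// repoName : The name of the repository
+// provider : The VCS provider which hosts the repo (GITHUB/GITLAB/BITBUCKET)
+// limit : The number of vulnerability occurrences to fetch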
+func (c Client) GetRepoVulns(ctx context.Context, owner, repoName, provider string, limit int) ([]vulnerabilities.VulnerabilityOccurrence, error) { + req := vulnerabilitiesQuery.NewRepoVulnsRequest(c.gqlWrapper, vulnerabilitiesQuery.RepoVulnsParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns vulnerabilities for a specific analysis run. +func (c Client) GetRunVulns(ctx context.Context, commitOid string, limit int) (*vulnerabilities.RunVulns, error) { + req := vulnerabilitiesQuery.NewRunVulnsRequest(c.gqlWrapper, vulnerabilitiesQuery.RunVulnsParams{ + CommitOid: commitOid, + Limit: limit, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} + +// Returns the list of enabled analyzers for a repository. +func (c Client) GetEnabledAnalyzers(ctx context.Context, owner, repoName, provider string) ([]analyzers.Analyzer, error) { + req := repoQuery.NewEnabledAnalyzersRequest(c.gqlWrapper, repoQuery.EnabledAnalyzersParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + }) + res, err := req.Do(ctx) + if err != nil { + return nil, err + } + return res, nil +} - res, err := req.Do(ctx, c) +// Returns vulnerabilities for a specific pull request. +func (c Client) GetPRVulns(ctx context.Context, owner, repoName, provider string, prNumber, limit int) (*vulnerabilities.PRVulns, error) { + req := vulnerabilitiesQuery.NewPRVulnsRequest(c.gqlWrapper, vulnerabilitiesQuery.PRVulnsParams{ + Owner: owner, + RepoName: repoName, + Provider: provider, + PRNumber: prNumber, + Limit: limit, + }) + res, err := req.Do(ctx) if err != nil { return nil, err } diff --git a/deepsource/graphqlclient/client.go b/deepsource/graphqlclient/client.go new file mode 100644 index 00000000..9abcfa20 --- /dev/null +++ b/deepsource/graphqlclient/client.go @@ -0,0 +1,10 @@ +package graphqlclient + +import "context" + +// GraphQLClient defines the contract for GraphQL operations. +type GraphQLClient interface { + Query(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error + Mutate(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error + SetAuthToken(token string) +} diff --git a/deepsource/graphqlclient/errors.go b/deepsource/graphqlclient/errors.go new file mode 100644 index 00000000..ff460abe --- /dev/null +++ b/deepsource/graphqlclient/errors.go @@ -0,0 +1,26 @@ +package graphqlclient + +import "fmt" + +// GraphQLError wraps errors from GraphQL operations. +type GraphQLError struct { + Operation string + Query string + Cause error +} + +func (e *GraphQLError) Error() string { + return fmt.Sprintf("GraphQL %s failed: %v", e.Operation, e.Cause) +} + +func (e *GraphQLError) Unwrap() error { + return e.Cause +} + +// TruncateQuery trims long queries for error messages. +func TruncateQuery(query string) string { + if len(query) > 100 { + return query[:100] + "..." + } + return query +} diff --git a/deepsource/graphqlclient/mock.go b/deepsource/graphqlclient/mock.go new file mode 100644 index 00000000..7f7f479e --- /dev/null +++ b/deepsource/graphqlclient/mock.go @@ -0,0 +1,32 @@ +package graphqlclient + +import "context" + +// MockClient is a mock implementation for testing. 
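+//
+// QueryFunc and MutateFunc, when set, are invoked in place of real API calls;
+// when left nil, the corresponding method is a no-op that returns nil. A short,
+// illustrative example of simulating a failed mutation in a test:
+//
+//	mock := NewMockClient()
+//	mock.MutateFunc = func(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error {
+//		return errors.New("simulated API failure")
+//	}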
+type MockClient struct { + QueryFunc func(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error + MutateFunc func(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error + token string +} + +func NewMockClient() *MockClient { + return &MockClient{} +} + +func (m *MockClient) Query(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error { + if m.QueryFunc != nil { + return m.QueryFunc(ctx, query, vars, result) + } + return nil +} + +func (m *MockClient) Mutate(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error { + if m.MutateFunc != nil { + return m.MutateFunc(ctx, mutation, vars, result) + } + return nil +} + +func (m *MockClient) SetAuthToken(token string) { + m.token = token +} diff --git a/deepsource/graphqlclient/wrapper.go b/deepsource/graphqlclient/wrapper.go new file mode 100644 index 00000000..27b505ab --- /dev/null +++ b/deepsource/graphqlclient/wrapper.go @@ -0,0 +1,104 @@ +package graphqlclient + +import ( + "context" + "fmt" + "strings" + + "github.com/deepsourcelabs/graphql" +) + +// TokenRefresher is called when a request fails due to an expired token. +// It receives the current token and should return a new valid token. +type TokenRefresher func(ctx context.Context, currentToken string) (newToken string, err error) + +type wrapper struct { + client *graphql.Client + token string + refresher TokenRefresher + refreshing bool +} + +// New creates a GraphQL client wrapper. +func New(url string, token string) GraphQLClient { + return &wrapper{ + client: graphql.NewClient(url), + token: token, + } +} + +// NewWithClient creates a GraphQL client wrapper using an existing graphql.Client. +func NewWithClient(client *graphql.Client, token string) GraphQLClient { + return &wrapper{ + client: client, + token: token, + } +} + +// NewWithClientAndRefresher creates a GraphQL client wrapper with auto-refresh support. +// When a request fails with an expired token error, the refresher is called to obtain +// a new token and the request is retried once. 
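+//
+// The refresh is attempted at most once per request, and the internal
+// refreshing guard keeps a refresher that itself calls the API through this
+// wrapper from triggering a second, recursive refresh. A hedged sketch of the
+// wiring (the helper names below are illustrative only):
+//
+//	gql := NewWithClientAndRefresher(graphql.NewClient(apiURL), token, func(ctx context.Context, current string) (string, error) {
+//		pat, err := refreshPAT(ctx, current) // e.g. run the RefreshToken mutation
+//		if err != nil {
+//			return "", err
+//		}
+//		saveCredentials(pat) // the caller is expected to persist the new token
+//		return pat.Token, nil
+//	})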
+func NewWithClientAndRefresher(client *graphql.Client, token string, refresher TokenRefresher) GraphQLClient { + return &wrapper{ + client: client, + token: token, + refresher: refresher, + } +} + +func (w *wrapper) Query(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error { + return w.run(ctx, query, vars, result, "query") +} + +func (w *wrapper) Mutate(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error { + return w.run(ctx, mutation, vars, result, "mutation") +} + +func (w *wrapper) SetAuthToken(token string) { + w.token = token +} + +func (w *wrapper) exec(ctx context.Context, query string, vars map[string]interface{}, result interface{}, op string) error { + req := graphql.NewRequest(query) + req.Header.Set("Cache-Control", "no-cache") + if w.token != "" { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", w.token)) + } + for key, value := range vars { + req.Var(key, value) + } + + if err := w.client.Run(ctx, req, result); err != nil { + return &GraphQLError{ + Operation: op, + Query: TruncateQuery(query), + Cause: err, + } + } + return nil +} + +func (w *wrapper) run(ctx context.Context, query string, vars map[string]interface{}, result interface{}, op string) error { + err := w.exec(ctx, query, vars, result, op) + if err == nil { + return nil + } + + if !w.refreshing && w.refresher != nil && isTokenExpired(err) { + w.refreshing = true + defer func() { w.refreshing = false }() + + newToken, refreshErr := w.refresher(ctx, w.token) + if refreshErr != nil { + return fmt.Errorf("Token expired and refresh failed, run \"deepsource auth login\" to re-authenticate: %w", refreshErr) + } + w.token = newToken + return w.exec(ctx, query, vars, result, op) + } + + return err +} + +func isTokenExpired(err error) bool { + return strings.Contains(err.Error(), "Signature has expired") +} diff --git a/deepsource/issues/issues_list.go b/deepsource/issues/issues_list.go index 2f23b81b..6ebf0f15 100644 --- a/deepsource/issues/issues_list.go +++ b/deepsource/issues/issues_list.go @@ -11,6 +11,7 @@ type Location struct { } type AnalyzerMeta struct { + Name string `json:"name"` // Analyzer name (human-readable) Shortcode string `json:"analyzer"` // Analyzer shortcode } @@ -19,6 +20,8 @@ type Issue struct { IssueCode string `json:"issue_code"` // DeepSource code for the issue reported IssueCategory string `json:"issue_category"` // Category of the issue reported IssueSeverity string `json:"issue_severity"` // Severity of the issue reported + IssueSource string `json:"issue_source"` // Source of the issue (STATIC or AI) + Description string `json:"description"` // Short description / explanation of the issue Location Location `json:"location"` // The location data for the issue reported Analyzer AnalyzerMeta // The Analyzer which raised the issue } diff --git a/deepsource/issues/queries/list_file_issues.go b/deepsource/issues/queries/list_file_issues.go index cf7817b7..dfc82863 100644 --- a/deepsource/issues/queries/list_file_issues.go +++ b/deepsource/issues/queries/list_file_issues.go @@ -5,8 +5,8 @@ import ( "context" "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/issues" - "github.com/deepsourcelabs/graphql" ) // Query to fetch issues for a certain file specified by the user @@ -60,6 +60,7 @@ type FileIssuesListParams struct { // Request struct type FileIssuesListRequest struct { + client graphqlclient.GraphQLClient Params FileIssuesListParams } @@ -94,33 
+95,21 @@ type FileIssuesResponse struct { } `json:"repository"` } -// GraphQL client interface -type IGQLClient interface { - GQL() *graphql.Client - GetToken() string +func NewFileIssuesListRequest(client graphqlclient.GraphQLClient, params FileIssuesListParams) *FileIssuesListRequest { + return &FileIssuesListRequest{client: client, Params: params} } -func (f FileIssuesListRequest) Do(ctx context.Context, client IGQLClient) ([]issues.Issue, error) { - req := graphql.NewRequest(fetchFileIssuesQuery) - req.Header.Set("Cache-Control", "no-cache") - - req.Var("name", f.Params.RepoName) - req.Var("owner", f.Params.Owner) - req.Var("provider", f.Params.Provider) - req.Var("path", f.Params.FilePath) - req.Var("limit", f.Params.Limit) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - - // Adding token as header for auth - tokenHeader := fmt.Sprintf("Bearer %s", client.GetToken()) - req.Header.Add("Authorization", tokenHeader) - - // run it and capture the response +func (f *FileIssuesListRequest) Do(ctx context.Context) ([]issues.Issue, error) { + vars := map[string]interface{}{ + "name": f.Params.RepoName, + "owner": f.Params.Owner, + "provider": f.Params.Provider, + "path": f.Params.FilePath, + "limit": f.Params.Limit, + } var respData FileIssuesResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := f.client.Query(ctx, fetchFileIssuesQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("List file issues: %w", err) } // Formatting the query response w.r.t the output format of the SDK as specified in `issues_list.go` diff --git a/deepsource/issues/queries/list_issues.go b/deepsource/issues/queries/list_issues.go index 61f69d4f..956acfb2 100644 --- a/deepsource/issues/queries/list_issues.go +++ b/deepsource/issues/queries/list_issues.go @@ -5,8 +5,8 @@ import ( "context" "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/issues" - "github.com/deepsourcelabs/graphql" ) const fetchAllIssuesQuery = `query GetAllIssues( @@ -28,6 +28,7 @@ const fetchAllIssuesQuery = `query GetAllIssues( issue { title shortcode + shortDescription category severity isRecommended @@ -53,6 +54,7 @@ type IssuesListParams struct { } type IssuesListRequest struct { + client graphqlclient.GraphQLClient Params IssuesListParams } @@ -67,13 +69,14 @@ type IssuesListResponse struct { Path string `json:"path"` BeginLine int `json:"beginLine"` EndLine int `json:"endLine"` - Issue struct { - Title string `json:"title"` - Shortcode string `json:"shortcode"` - Category string `json:"category"` - Severity string `json:"severity"` - IsRecommended bool `json:"isRecommended"` - Analyzer struct { + Issue struct { + Title string `json:"title"` + Shortcode string `json:"shortcode"` + ShortDescription string `json:"shortDescription"` + Category string `json:"category"` + Severity string `json:"severity"` + IsRecommended bool `json:"isRecommended"` + Analyzer struct { Name string `json:"name"` Shortcode string `json:"shortcode"` } `json:"analyzer"` @@ -87,23 +90,20 @@ type IssuesListResponse struct { } `json:"repository"` } -func (i IssuesListRequest) Do(ctx context.Context, client IGQLClient) ([]issues.Issue, error) { - req := graphql.NewRequest(fetchAllIssuesQuery) - req.Var("name", i.Params.RepoName) - req.Var("owner", i.Params.Owner) - req.Var("provider", i.Params.Provider) - req.Var("limit", i.Params.Limit) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - // Adding PAT as a 
header for authentication - tokenHeader := fmt.Sprintf("Bearer %s", client.GetToken()) - req.Header.Add("Authorization", tokenHeader) +func NewIssuesListRequest(client graphqlclient.GraphQLClient, params IssuesListParams) *IssuesListRequest { + return &IssuesListRequest{client: client, Params: params} +} - // run it and capture the response +func (i *IssuesListRequest) Do(ctx context.Context) ([]issues.Issue, error) { + vars := map[string]interface{}{ + "name": i.Params.RepoName, + "owner": i.Params.Owner, + "provider": i.Params.Provider, + "limit": i.Params.Limit, + } var respData IssuesListResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := i.client.Query(ctx, fetchAllIssuesQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("List issues: %w", err) } issuesData := []issues.Issue{} @@ -125,7 +125,9 @@ func (i IssuesListRequest) Do(ctx context.Context, client IGQLClient) ([]issues. EndLine: occurenceEdge.Node.EndLine, }, }, + Description: occurenceEdge.Node.Issue.ShortDescription, Analyzer: issues.AnalyzerMeta{ + Name: occurenceEdge.Node.Issue.Analyzer.Name, Shortcode: occurenceEdge.Node.Issue.Analyzer.Shortcode, }, } diff --git a/deepsource/issues/queries/pr_issues.go b/deepsource/issues/queries/pr_issues.go new file mode 100644 index 00000000..d8f3b98e --- /dev/null +++ b/deepsource/issues/queries/pr_issues.go @@ -0,0 +1,127 @@ +// Lists the issues from a pull request +package issues + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/issues" +) + +const fetchPRIssuesQuery = `query GetPRIssues( + $name: String! + $owner: String! + $provider: VCSProvider! + $prNumber: Int! + $limit: Int! +) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + pullRequest(number: $prNumber) { + issueOccurrences(first: $limit) { + edges { + node { + path + beginLine + endLine + title + issue { + shortcode + shortDescription + category + severity + analyzer { + name + shortcode + } + } + } + } + } + } + } +}` + +type PRIssuesListParams struct { + Owner string + RepoName string + Provider string + PRNumber int + Limit int +} + +type PRIssuesListRequest struct { + client graphqlclient.GraphQLClient + Params PRIssuesListParams +} + +type PRIssuesListResponse struct { + Repository struct { + PullRequest struct { + IssueOccurrences struct { + Edges []struct { + Node struct { + Path string `json:"path"` + BeginLine int `json:"beginLine"` + EndLine int `json:"endLine"` + Title string `json:"title"` + Issue struct { + Shortcode string `json:"shortcode"` + ShortDescription string `json:"shortDescription"` + Category string `json:"category"` + Severity string `json:"severity"` + Analyzer struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + } `json:"analyzer"` + } `json:"issue"` + } `json:"node"` + } `json:"edges"` + } `json:"issueOccurrences"` + } `json:"pullRequest"` + } `json:"repository"` +} + +func NewPRIssuesListRequest(client graphqlclient.GraphQLClient, params PRIssuesListParams) *PRIssuesListRequest { + return &PRIssuesListRequest{client: client, Params: params} +} + +func (r *PRIssuesListRequest) Do(ctx context.Context) ([]issues.Issue, error) { + vars := map[string]interface{}{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + "prNumber": r.Params.PRNumber, + "limit": r.Params.Limit, + } + var respData PRIssuesListResponse + if err := r.client.Query(ctx, fetchPRIssuesQuery, vars, 
&respData); err != nil { + return nil, fmt.Errorf("List PR issues: %w", err) + } + + result := make([]issues.Issue, 0) + for _, edge := range respData.Repository.PullRequest.IssueOccurrences.Edges { + node := edge.Node + issue := issues.Issue{ + IssueText: node.Title, + IssueCode: node.Issue.Shortcode, + IssueCategory: node.Issue.Category, + IssueSeverity: node.Issue.Severity, + Description: node.Issue.ShortDescription, + Location: issues.Location{ + Path: node.Path, + Position: issues.Position{ + BeginLine: node.BeginLine, + EndLine: node.EndLine, + }, + }, + Analyzer: issues.AnalyzerMeta{ + Name: node.Issue.Analyzer.Name, + Shortcode: node.Issue.Analyzer.Shortcode, + }, + } + result = append(result, issue) + } + + return result, nil +} diff --git a/deepsource/issues/queries/run_issues.go b/deepsource/issues/queries/run_issues.go new file mode 100644 index 00000000..c644166c --- /dev/null +++ b/deepsource/issues/queries/run_issues.go @@ -0,0 +1,125 @@ +// Lists the issues from a specific run (flattened to Issue type) +package issues + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/issues" +) + +const fetchRunIssuesFlatQuery = `query GetRunIssues($commitOid: String!, $limit: Int!) { + run(commitOid: $commitOid) { + checks { + edges { + node { + analyzer { + name + shortcode + } + issues(first: $limit) { + edges { + node { + source + path + beginLine + endLine + title + shortcode + explanation + category + severity + } + } + } + } + } + } + } +}` + +type RunIssuesFlatParams struct { + CommitOid string + Limit int +} + +type RunIssuesFlatRequest struct { + client graphqlclient.GraphQLClient + Params RunIssuesFlatParams +} + +type RunIssuesFlatResponse struct { + Run struct { + Checks struct { + Edges []struct { + Node struct { + Analyzer struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + } `json:"analyzer"` + Issues struct { + Edges []struct { + Node struct { + Source string `json:"source"` + Path string `json:"path"` + BeginLine int `json:"beginLine"` + EndLine int `json:"endLine"` + Title string `json:"title"` + Shortcode string `json:"shortcode"` + Explanation string `json:"explanation"` + Category string `json:"category"` + Severity string `json:"severity"` + } `json:"node"` + } `json:"edges"` + } `json:"issues"` + } `json:"node"` + } `json:"edges"` + } `json:"checks"` + } `json:"run"` +} + +func NewRunIssuesFlatRequest(client graphqlclient.GraphQLClient, params RunIssuesFlatParams) *RunIssuesFlatRequest { + return &RunIssuesFlatRequest{client: client, Params: params} +} + +func (r *RunIssuesFlatRequest) Do(ctx context.Context) ([]issues.Issue, error) { + vars := map[string]interface{}{ + "commitOid": r.Params.CommitOid, + "limit": r.Params.Limit, + } + var respData RunIssuesFlatResponse + if err := r.client.Query(ctx, fetchRunIssuesFlatQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("List run issues: %w", err) + } + + result := make([]issues.Issue, 0) + for _, checkEdge := range respData.Run.Checks.Edges { + check := checkEdge.Node + for _, issueEdge := range check.Issues.Edges { + node := issueEdge.Node + issue := issues.Issue{ + IssueText: node.Title, + IssueCode: node.Shortcode, + IssueCategory: node.Category, + IssueSeverity: node.Severity, + IssueSource: node.Source, + Description: node.Explanation, + Location: issues.Location{ + Path: node.Path, + Position: issues.Position{ + BeginLine: node.BeginLine, + EndLine: node.EndLine, + }, + }, + Analyzer: 
issues.AnalyzerMeta{ + Name: check.Analyzer.Name, + Shortcode: check.Analyzer.Shortcode, + }, + } + result = append(result, issue) + } + } + + return result, nil +} diff --git a/deepsource/metrics/queries/pr.go b/deepsource/metrics/queries/pr.go new file mode 100644 index 00000000..f9d2dc42 --- /dev/null +++ b/deepsource/metrics/queries/pr.go @@ -0,0 +1,132 @@ +// Fetches metrics from a pull request +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/metrics" +) + +const fetchPRMetricsQuery = `query GetPRMetrics( + $name: String! + $owner: String! + $provider: VCSProvider! + $prNumber: Int! +) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + pullRequest(number: $prNumber) { + number + title + baseBranch + branch + metrics { + name + shortcode + description + positiveDirection + unit + isReported + isThresholdEnforced + items { + key + threshold + latestValue + latestValueDisplay + thresholdStatus + } + } + } + } +}` + +type PRMetricsParams struct { + Owner string + RepoName string + Provider string + PRNumber int +} + +type PRMetricsRequest struct { + client graphqlclient.GraphQLClient + Params PRMetricsParams +} + +type PRMetricsResponse struct { + Repository struct { + PullRequest struct { + Number int `json:"number"` + Title string `json:"title"` + BaseBranch string `json:"baseBranch"` + Branch string `json:"branch"` + Metrics []struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + Description string `json:"description"` + PositiveDirection string `json:"positiveDirection"` + Unit string `json:"unit"` + IsReported bool `json:"isReported"` + IsThresholdEnforced bool `json:"isThresholdEnforced"` + Items []struct { + Key string `json:"key"` + Threshold *int `json:"threshold"` + LatestValue *float64 `json:"latestValue"` + LatestValueDisplay string `json:"latestValueDisplay"` + ThresholdStatus string `json:"thresholdStatus"` + } `json:"items"` + } `json:"metrics"` + } `json:"pullRequest"` + } `json:"repository"` +} + +func NewPRMetricsRequest(client graphqlclient.GraphQLClient, params PRMetricsParams) *PRMetricsRequest { + return &PRMetricsRequest{client: client, Params: params} +} + +func (r *PRMetricsRequest) Do(ctx context.Context) (*metrics.PRMetrics, error) { + vars := map[string]any{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + "prNumber": r.Params.PRNumber, + } + var respData PRMetricsResponse + if err := r.client.Query(ctx, fetchPRMetricsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch PR metrics: %w", err) + } + + pr := respData.Repository.PullRequest + result := &metrics.PRMetrics{ + Number: pr.Number, + Title: pr.Title, + BaseBranch: pr.BaseBranch, + Branch: pr.Branch, + Metrics: make([]metrics.RepositoryMetric, 0, len(pr.Metrics)), + } + + for _, m := range pr.Metrics { + metric := metrics.RepositoryMetric{ + Name: m.Name, + Shortcode: m.Shortcode, + Description: m.Description, + PositiveDirection: m.PositiveDirection, + Unit: m.Unit, + IsReported: m.IsReported, + IsThresholdEnforced: m.IsThresholdEnforced, + Items: make([]metrics.RepositoryMetricItem, 0, len(m.Items)), + } + for _, item := range m.Items { + metric.Items = append(metric.Items, metrics.RepositoryMetricItem{ + Key: item.Key, + Threshold: item.Threshold, + LatestValue: item.LatestValue, + LatestValueDisplay: item.LatestValueDisplay, + ThresholdStatus: item.ThresholdStatus, + }) + } + result.Metrics = 
append(result.Metrics, metric) + } + + return result, nil +} diff --git a/deepsource/metrics/queries/repo.go b/deepsource/metrics/queries/repo.go new file mode 100644 index 00000000..95471a09 --- /dev/null +++ b/deepsource/metrics/queries/repo.go @@ -0,0 +1,109 @@ +// Fetches metrics from a repository's default branch +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/metrics" +) + +const fetchRepoMetricsQuery = `query GetRepoMetrics( + $name: String! + $owner: String! + $provider: VCSProvider! +) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + metrics { + name + shortcode + description + positiveDirection + unit + isReported + isThresholdEnforced + items { + key + threshold + latestValue + latestValueDisplay + thresholdStatus + } + } + } +}` + +type RepoMetricsParams struct { + Owner string + RepoName string + Provider string +} + +type RepoMetricsRequest struct { + client graphqlclient.GraphQLClient + Params RepoMetricsParams +} + +type RepoMetricsResponse struct { + Repository struct { + Metrics []struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + Description string `json:"description"` + PositiveDirection string `json:"positiveDirection"` + Unit string `json:"unit"` + IsReported bool `json:"isReported"` + IsThresholdEnforced bool `json:"isThresholdEnforced"` + Items []struct { + Key string `json:"key"` + Threshold *int `json:"threshold"` + LatestValue *float64 `json:"latestValue"` + LatestValueDisplay string `json:"latestValueDisplay"` + ThresholdStatus string `json:"thresholdStatus"` + } `json:"items"` + } `json:"metrics"` + } `json:"repository"` +} + +func NewRepoMetricsRequest(client graphqlclient.GraphQLClient, params RepoMetricsParams) *RepoMetricsRequest { + return &RepoMetricsRequest{client: client, Params: params} +} + +func (r *RepoMetricsRequest) Do(ctx context.Context) ([]metrics.RepositoryMetric, error) { + vars := map[string]any{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + } + var respData RepoMetricsResponse + if err := r.client.Query(ctx, fetchRepoMetricsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch repo metrics: %w", err) + } + + result := make([]metrics.RepositoryMetric, 0, len(respData.Repository.Metrics)) + for _, m := range respData.Repository.Metrics { + metric := metrics.RepositoryMetric{ + Name: m.Name, + Shortcode: m.Shortcode, + Description: m.Description, + PositiveDirection: m.PositiveDirection, + Unit: m.Unit, + IsReported: m.IsReported, + IsThresholdEnforced: m.IsThresholdEnforced, + Items: make([]metrics.RepositoryMetricItem, 0, len(m.Items)), + } + for _, item := range m.Items { + metric.Items = append(metric.Items, metrics.RepositoryMetricItem{ + Key: item.Key, + Threshold: item.Threshold, + LatestValue: item.LatestValue, + LatestValueDisplay: item.LatestValueDisplay, + ThresholdStatus: item.ThresholdStatus, + }) + } + result = append(result, metric) + } + + return result, nil +} diff --git a/deepsource/metrics/queries/run.go b/deepsource/metrics/queries/run.go new file mode 100644 index 00000000..5c53a03f --- /dev/null +++ b/deepsource/metrics/queries/run.go @@ -0,0 +1,198 @@ +// Fetches metrics from a specific analysis run +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/metrics" +) + +const fetchRunMetricsQuery = `query 
GetRunMetrics($commitOid: String!) { + run(commitOid: $commitOid) { + commitOid + branchName + status + changesetStats { + lines { + overall + overallCovered + new + newCovered + } + branches { + overall + overallCovered + new + newCovered + } + conditions { + overall + overallCovered + new + newCovered + } + } + checks { + edges { + node { + metrics { + name + shortcode + description + positiveDirection + unit + isReported + isThresholdEnforced + items { + key + threshold + latestValue + latestValueDisplay + thresholdStatus + } + } + } + } + } + } +}` + +type RunMetricsParams struct { + CommitOid string +} + +type RunMetricsRequest struct { + client graphqlclient.GraphQLClient + Params RunMetricsParams +} + +type RunMetricsResponse struct { + Run struct { + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + ChangesetStats *struct { + Lines struct { + Overall *int `json:"overall"` + OverallCovered *int `json:"overallCovered"` + New *int `json:"new"` + NewCovered *int `json:"newCovered"` + } `json:"lines"` + Branches struct { + Overall *int `json:"overall"` + OverallCovered *int `json:"overallCovered"` + New *int `json:"new"` + NewCovered *int `json:"newCovered"` + } `json:"branches"` + Conditions struct { + Overall *int `json:"overall"` + OverallCovered *int `json:"overallCovered"` + New *int `json:"new"` + NewCovered *int `json:"newCovered"` + } `json:"conditions"` + } `json:"changesetStats"` + Checks struct { + Edges []struct { + Node struct { + Metrics []struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + Description string `json:"description"` + PositiveDirection string `json:"positiveDirection"` + Unit string `json:"unit"` + IsReported bool `json:"isReported"` + IsThresholdEnforced bool `json:"isThresholdEnforced"` + Items []struct { + Key string `json:"key"` + Threshold *int `json:"threshold"` + LatestValue *float64 `json:"latestValue"` + LatestValueDisplay string `json:"latestValueDisplay"` + ThresholdStatus string `json:"thresholdStatus"` + } `json:"items"` + } `json:"metrics"` + } `json:"node"` + } `json:"edges"` + } `json:"checks"` + } `json:"run"` +} + +func NewRunMetricsRequest(client graphqlclient.GraphQLClient, params RunMetricsParams) *RunMetricsRequest { + return &RunMetricsRequest{client: client, Params: params} +} + +func (r *RunMetricsRequest) Do(ctx context.Context) (*metrics.RunMetrics, error) { + vars := map[string]any{ + "commitOid": r.Params.CommitOid, + } + var respData RunMetricsResponse + if err := r.client.Query(ctx, fetchRunMetricsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch run metrics: %w", err) + } + + result := &metrics.RunMetrics{ + CommitOid: respData.Run.CommitOid, + BranchName: respData.Run.BranchName, + Status: respData.Run.Status, + Metrics: make([]metrics.RepositoryMetric, 0), + } + + // Add changeset stats if available + if respData.Run.ChangesetStats != nil { + result.ChangesetStats = &metrics.ChangesetStats{ + Lines: metrics.ChangesetStatsCounts{ + Overall: respData.Run.ChangesetStats.Lines.Overall, + OverallCovered: respData.Run.ChangesetStats.Lines.OverallCovered, + New: respData.Run.ChangesetStats.Lines.New, + NewCovered: respData.Run.ChangesetStats.Lines.NewCovered, + }, + Branches: metrics.ChangesetStatsCounts{ + Overall: respData.Run.ChangesetStats.Branches.Overall, + OverallCovered: respData.Run.ChangesetStats.Branches.OverallCovered, + New: respData.Run.ChangesetStats.Branches.New, + NewCovered: 
respData.Run.ChangesetStats.Branches.NewCovered, + }, + Conditions: metrics.ChangesetStatsCounts{ + Overall: respData.Run.ChangesetStats.Conditions.Overall, + OverallCovered: respData.Run.ChangesetStats.Conditions.OverallCovered, + New: respData.Run.ChangesetStats.Conditions.New, + NewCovered: respData.Run.ChangesetStats.Conditions.NewCovered, + }, + } + } + + // Collect metrics from all checks (de-duplicate by shortcode) + seen := make(map[string]bool) + for _, checkEdge := range respData.Run.Checks.Edges { + for _, m := range checkEdge.Node.Metrics { + if m.Shortcode == "" || seen[m.Shortcode] { + continue + } + seen[m.Shortcode] = true + + metric := metrics.RepositoryMetric{ + Name: m.Name, + Shortcode: m.Shortcode, + Description: m.Description, + PositiveDirection: m.PositiveDirection, + Unit: m.Unit, + IsReported: m.IsReported, + IsThresholdEnforced: m.IsThresholdEnforced, + Items: make([]metrics.RepositoryMetricItem, 0, len(m.Items)), + } + for _, item := range m.Items { + metric.Items = append(metric.Items, metrics.RepositoryMetricItem{ + Key: item.Key, + Threshold: item.Threshold, + LatestValue: item.LatestValue, + LatestValueDisplay: item.LatestValueDisplay, + ThresholdStatus: item.ThresholdStatus, + }) + } + result.Metrics = append(result.Metrics, metric) + } + } + + return result, nil +} diff --git a/deepsource/metrics/types.go b/deepsource/metrics/types.go new file mode 100644 index 00000000..b3d153cc --- /dev/null +++ b/deepsource/metrics/types.go @@ -0,0 +1,55 @@ +package metrics + +// RepositoryMetric represents a metric from a repository +type RepositoryMetric struct { + Name string `json:"name" yaml:"name"` + Shortcode string `json:"shortcode" yaml:"shortcode"` + Description string `json:"description" yaml:"description"` + PositiveDirection string `json:"positive_direction" yaml:"positive_direction"` + Unit string `json:"unit" yaml:"unit"` + IsReported bool `json:"is_reported" yaml:"is_reported"` + IsThresholdEnforced bool `json:"is_threshold_enforced" yaml:"is_threshold_enforced"` + Items []RepositoryMetricItem `json:"items" yaml:"items"` +} + +// RepositoryMetricItem represents an item within a repository metric +type RepositoryMetricItem struct { + Key string `json:"key" yaml:"key"` + Threshold *int `json:"threshold,omitempty" yaml:"threshold,omitempty"` + LatestValue *float64 `json:"latest_value,omitempty" yaml:"latest_value,omitempty"` + LatestValueDisplay string `json:"latest_value_display" yaml:"latest_value_display"` + ThresholdStatus string `json:"threshold_status,omitempty" yaml:"threshold_status,omitempty"` +} + +// RunMetrics contains metrics from a specific run +type RunMetrics struct { + CommitOid string `json:"commit_oid" yaml:"commit_oid"` + BranchName string `json:"branch_name" yaml:"branch_name"` + Status string `json:"status" yaml:"status"` + Metrics []RepositoryMetric `json:"metrics" yaml:"metrics"` + ChangesetStats *ChangesetStats `json:"changeset_stats,omitempty" yaml:"changeset_stats,omitempty"` +} + +// ChangesetStats contains coverage stats for a changeset +type ChangesetStats struct { + Lines ChangesetStatsCounts `json:"lines" yaml:"lines"` + Branches ChangesetStatsCounts `json:"branches" yaml:"branches"` + Conditions ChangesetStatsCounts `json:"conditions" yaml:"conditions"` +} + +// ChangesetStatsCounts contains coverage counts +type ChangesetStatsCounts struct { + Overall *int `json:"overall,omitempty" yaml:"overall,omitempty"` + OverallCovered *int `json:"overall_covered,omitempty" yaml:"overall_covered,omitempty"` + New *int 
`json:"new,omitempty" yaml:"new,omitempty"` + NewCovered *int `json:"new_covered,omitempty" yaml:"new_covered,omitempty"` +} + +// PRMetrics contains metrics from a pull request +type PRMetrics struct { + Number int `json:"number" yaml:"number"` + Title string `json:"title" yaml:"title"` + BaseBranch string `json:"base_branch" yaml:"base_branch"` + Branch string `json:"branch" yaml:"branch"` + Metrics []RepositoryMetric `json:"metrics" yaml:"metrics"` +} diff --git a/deepsource/repository/queries/enabled_analyzers.go b/deepsource/repository/queries/enabled_analyzers.go new file mode 100644 index 00000000..b18253a2 --- /dev/null +++ b/deepsource/repository/queries/enabled_analyzers.go @@ -0,0 +1,71 @@ +package repository + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/analyzers" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" +) + +const enabledAnalyzersQuery = `query EnabledAnalyzers($name: String!, $owner: String!, $provider: VCSProvider!) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + enabledAnalyzers { + edges { + node { + name + shortcode + } + } + } + } +}` + +type EnabledAnalyzersParams struct { + Owner string + RepoName string + Provider string +} + +type EnabledAnalyzersRequest struct { + client graphqlclient.GraphQLClient + Params EnabledAnalyzersParams +} + +type EnabledAnalyzersResponse struct { + Repository struct { + EnabledAnalyzers struct { + Edges []struct { + Node struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + } `json:"node"` + } `json:"edges"` + } `json:"enabledAnalyzers"` + } `json:"repository"` +} + +func NewEnabledAnalyzersRequest(client graphqlclient.GraphQLClient, params EnabledAnalyzersParams) *EnabledAnalyzersRequest { + return &EnabledAnalyzersRequest{client: client, Params: params} +} + +func (r *EnabledAnalyzersRequest) Do(ctx context.Context) ([]analyzers.Analyzer, error) { + vars := map[string]interface{}{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + } + var respData EnabledAnalyzersResponse + if err := r.client.Query(ctx, enabledAnalyzersQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch enabled analyzers: %w", err) + } + + result := make([]analyzers.Analyzer, len(respData.Repository.EnabledAnalyzers.Edges)) + for i, edge := range respData.Repository.EnabledAnalyzers.Edges { + result[i] = analyzers.Analyzer{ + Name: edge.Node.Name, + Shortcode: edge.Node.Shortcode, + } + } + return result, nil +} diff --git a/deepsource/repository/queries/repository_status.go b/deepsource/repository/queries/repository_status.go index a0c37974..0a9783ea 100644 --- a/deepsource/repository/queries/repository_status.go +++ b/deepsource/repository/queries/repository_status.go @@ -4,8 +4,8 @@ import ( "context" "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/repository" - "github.com/deepsourcelabs/graphql" ) // Query to fetch the status of the repo data sent as param @@ -22,6 +22,7 @@ type RepoStatusParams struct { } type RepoStatusRequest struct { + client graphqlclient.GraphQLClient Params RepoStatusParams } @@ -31,28 +32,19 @@ type RepoStatusResponse struct { } `json:"repository"` } -// GraphQL client interface -type IGQLClient interface { - GQL() *graphql.Client - GetToken() string +func NewRepoStatusRequest(client graphqlclient.GraphQLClient, params RepoStatusParams) *RepoStatusRequest { + return &RepoStatusRequest{client: client, Params: params} } -func (r 
RepoStatusRequest) Do(ctx context.Context, client IGQLClient) (*repository.Meta, error) { - req := graphql.NewRequest(repoStatusQuery) - req.Var("name", r.Params.RepoName) - req.Var("owner", r.Params.Owner) - req.Var("provider", r.Params.Provider) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - // Adding PAT as header for auth - tokenHeader := fmt.Sprintf("Bearer %s", client.GetToken()) - req.Header.Add("Authorization", tokenHeader) - - // run it and capture the response +func (r *RepoStatusRequest) Do(ctx context.Context) (*repository.Meta, error) { + vars := map[string]interface{}{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + } var respData RepoStatusResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := r.client.Query(ctx, repoStatusQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch repo status: %w", err) } // Formatting the query response w.r.t the repository.Meta structure diff --git a/deepsource/runs/queries/get_run_issues.go b/deepsource/runs/queries/get_run_issues.go new file mode 100644 index 00000000..06bd9353 --- /dev/null +++ b/deepsource/runs/queries/get_run_issues.go @@ -0,0 +1,135 @@ +// Get issues for a specific analysis run +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/runs" +) + +const getRunIssuesQuery = `query GetRunIssues($commitOid: String!) { + run(commitOid: $commitOid) { + runUid + commitOid + branchName + status + checks { + edges { + node { + analyzer { + name + shortcode + } + issues { + edges { + node { + source + path + beginLine + beginColumn + endLine + endColumn + title + shortcode + category + severity + } + } + } + } + } + } + } +}` + +type RunIssuesParams struct { + CommitOid string +} + +type RunIssuesRequest struct { + client graphqlclient.GraphQLClient + Params RunIssuesParams +} + +type RunIssuesResponse struct { + Run struct { + RunUid string `json:"runUid"` + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + Checks struct { + Edges []struct { + Node struct { + Analyzer struct { + Name string `json:"name"` + Shortcode string `json:"shortcode"` + } `json:"analyzer"` + Issues struct { + Edges []struct { + Node struct { + Source string `json:"source"` + Path string `json:"path"` + BeginLine int `json:"beginLine"` + BeginColumn int `json:"beginColumn"` + EndLine int `json:"endLine"` + EndColumn int `json:"endColumn"` + Title string `json:"title"` + Shortcode string `json:"shortcode"` + Category string `json:"category"` + Severity string `json:"severity"` + } `json:"node"` + } `json:"edges"` + } `json:"issues"` + } `json:"node"` + } `json:"edges"` + } `json:"checks"` + } `json:"run"` +} + +func NewRunIssuesRequest(client graphqlclient.GraphQLClient, params RunIssuesParams) *RunIssuesRequest { + return &RunIssuesRequest{client: client, Params: params} +} + +func (r *RunIssuesRequest) Do(ctx context.Context) (*runs.RunWithIssues, error) { + vars := map[string]interface{}{ + "commitOid": r.Params.CommitOid, + } + var respData RunIssuesResponse + if err := r.client.Query(ctx, getRunIssuesQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Get run issues: %w", err) + } + + result := &runs.RunWithIssues{ + RunUid: respData.Run.RunUid, + CommitOid: respData.Run.CommitOid, + BranchName: respData.Run.BranchName, + Status: respData.Run.Status, + 
Issues: make([]runs.RunIssue, 0), + } + + for _, checkEdge := range respData.Run.Checks.Edges { + check := checkEdge.Node + for _, issueEdge := range check.Issues.Edges { + node := issueEdge.Node + issue := runs.RunIssue{ + Path: node.Path, + BeginLine: node.BeginLine, + BeginColumn: node.BeginColumn, + EndLine: node.EndLine, + EndColumn: node.EndColumn, + IssueCode: node.Shortcode, + Title: node.Title, + Category: node.Category, + Severity: node.Severity, + Source: node.Source, + AnalyzerName: check.Analyzer.Name, + AnalyzerShortcode: check.Analyzer.Shortcode, + } + result.Issues = append(result.Issues, issue) + } + } + + return result, nil +} diff --git a/deepsource/runs/queries/list_analysis_runs.go b/deepsource/runs/queries/list_analysis_runs.go new file mode 100644 index 00000000..527f00fb --- /dev/null +++ b/deepsource/runs/queries/list_analysis_runs.go @@ -0,0 +1,107 @@ +// Lists the analysis runs for a repository +package queries + +import ( + "context" + "fmt" + "time" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/runs" +) + +const fetchAnalysisRunsQuery = `query GetAnalysisRuns( + $name: String! + $owner: String! + $provider: VCSProvider! + $limit: Int! +) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + analysisRuns(first: $limit) { + edges { + node { + runUid + commitOid + branchName + status + createdAt + finishedAt + summary { + occurrencesIntroduced + occurrencesResolved + occurrencesSuppressed + } + } + } + } + } +}` + +type AnalysisRunsListParams struct { + Owner string + RepoName string + Provider string + Limit int +} + +type AnalysisRunsListRequest struct { + client graphqlclient.GraphQLClient + Params AnalysisRunsListParams +} + +type AnalysisRunsListResponse struct { + Repository struct { + AnalysisRuns struct { + Edges []struct { + Node struct { + RunUid string `json:"runUid"` + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + FinishedAt *time.Time `json:"finishedAt"` + Summary struct { + OccurrencesIntroduced int `json:"occurrencesIntroduced"` + OccurrencesResolved int `json:"occurrencesResolved"` + OccurrencesSuppressed int `json:"occurrencesSuppressed"` + } `json:"summary"` + } `json:"node"` + } `json:"edges"` + } `json:"analysisRuns"` + } `json:"repository"` +} + +func NewAnalysisRunsListRequest(client graphqlclient.GraphQLClient, params AnalysisRunsListParams) *AnalysisRunsListRequest { + return &AnalysisRunsListRequest{client: client, Params: params} +} + +func (r *AnalysisRunsListRequest) Do(ctx context.Context) ([]runs.AnalysisRun, error) { + vars := map[string]interface{}{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + "limit": r.Params.Limit, + } + var respData AnalysisRunsListResponse + if err := r.client.Query(ctx, fetchAnalysisRunsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("List analysis runs: %w", err) + } + + result := make([]runs.AnalysisRun, 0, len(respData.Repository.AnalysisRuns.Edges)) + for _, edge := range respData.Repository.AnalysisRuns.Edges { + run := runs.AnalysisRun{ + RunUid: edge.Node.RunUid, + CommitOid: edge.Node.CommitOid, + BranchName: edge.Node.BranchName, + Status: edge.Node.Status, + CreatedAt: edge.Node.CreatedAt, + FinishedAt: edge.Node.FinishedAt, + OccurrencesIntroduced: edge.Node.Summary.OccurrencesIntroduced, + OccurrencesResolved: edge.Node.Summary.OccurrencesResolved, + 
OccurrencesSuppressed: edge.Node.Summary.OccurrencesSuppressed, + } + result = append(result, run) + } + + return result, nil +} diff --git a/deepsource/runs/types.go b/deepsource/runs/types.go new file mode 100644 index 00000000..7196cb6e --- /dev/null +++ b/deepsource/runs/types.go @@ -0,0 +1,42 @@ +package runs + +import "time" + +// AnalysisRun represents a single analysis run in the repository history +type AnalysisRun struct { + RunUid string `json:"runUid"` + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + FinishedAt *time.Time `json:"finishedAt"` + OccurrencesIntroduced int `json:"occurrencesIntroduced"` + OccurrencesResolved int `json:"occurrencesResolved"` + OccurrencesSuppressed int `json:"occurrencesSuppressed"` +} + +// RunWithIssues represents an analysis run with its associated issues +type RunWithIssues struct { + RunUid string `json:"runUid"` + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + Issues []RunIssue `json:"issues"` +} + +// RunIssue represents an issue found in an analysis run +type RunIssue struct { + Path string `json:"path"` + BeginLine int `json:"beginLine"` + BeginColumn int `json:"beginColumn"` + EndLine int `json:"endLine"` + EndColumn int `json:"endColumn"` + IssueText string `json:"issueText"` + IssueCode string `json:"issueCode"` + Title string `json:"title"` + Category string `json:"category"` + Severity string `json:"severity"` + Source string `json:"source"` + AnalyzerName string `json:"analyzerName"` + AnalyzerShortcode string `json:"analyzerShortcode"` +} diff --git a/deepsource/tests/get_analyzers_test.go b/deepsource/tests/get_analyzers_test.go index 8f8b876c..ae213759 100644 --- a/deepsource/tests/get_analyzers_test.go +++ b/deepsource/tests/get_analyzers_test.go @@ -2,42 +2,59 @@ package tests import ( "context" - "io/ioutil" + "fmt" + "io" "log" + "os" "net/http" "reflect" "testing" analyzers "github.com/deepsourcelabs/cli/deepsource/analyzers/queries" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/graphql" ) -// mock client -type Client struct { - gql *graphql.Client - token string +type testGraphQLClient struct { + client *graphql.Client + token string } -// Returns a GraphQL client which can be used to interact with the GQL APIs -func (c Client) GQL() *graphql.Client { - return c.gql +func (c *testGraphQLClient) Query(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error { + return c.run(ctx, query, vars, result) } -// Returns the token which is required for authentication and thus, interacting with the APIs -func (c Client) GetToken() string { - return c.token +func (c *testGraphQLClient) Mutate(ctx context.Context, mutation string, vars map[string]interface{}, result interface{}) error { + return c.run(ctx, mutation, vars, result) +} + +func (c *testGraphQLClient) SetAuthToken(token string) { + c.token = token +} + +func (c *testGraphQLClient) run(ctx context.Context, query string, vars map[string]interface{}, result interface{}) error { + req := graphql.NewRequest(query) + req.Header.Set("Cache-Control", "no-cache") + if c.token != "" { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", c.token)) + } + for key, value := range vars { + req.Var(key, value) + } + return c.client.Run(ctx, req, result) } func TestAnalyzers(t *testing.T) { t.Run("valid GraphQL request", func(t *testing.T) { // create 
client gql := graphql.NewClient("http://localhost:8081/analyzer") - c := Client{gql: gql, token: "secret"} + c := &testGraphQLClient{client: gql, token: "secret"} // perform request - req := analyzers.AnalyzersRequest{} + var gqlClient graphqlclient.GraphQLClient = c + req := analyzers.NewAnalyzersRequest(gqlClient) ctx := context.Background() - _, err := req.Do(ctx, c) + _, err := req.Do(ctx) if err != nil { t.Error(err.Error()) } @@ -46,24 +63,24 @@ func TestAnalyzers(t *testing.T) { // a mock GraphQL handler for testing func mockAnalyzer(w http.ResponseWriter, r *http.Request) { - req, _ := ioutil.ReadAll(r.Body) + req, _ := io.ReadAll(r.Body) // Read test graphql request body artifact file - requestBodyData, err := ioutil.ReadFile("./testdata/analyzer/request_body.txt") + requestBodyData, err := os.ReadFile("./testdata/analyzer/request_body.txt") if err != nil { log.Println(err) return } // Read test graphql success response body artifact file - successResponseBodyData, err := ioutil.ReadFile("./testdata/analyzer/success_response_body.json") + successResponseBodyData, err := os.ReadFile("./testdata/analyzer/success_response_body.json") if err != nil { log.Println(err) return } // Read test graphql error response body artifact file - errorResponseBodyData, err := ioutil.ReadFile("./testdata/analyzer/error_response_body.json") + errorResponseBodyData, err := os.ReadFile("./testdata/analyzer/error_response_body.json") if err != nil { log.Println(err) return diff --git a/deepsource/tests/get_viewer_test.go b/deepsource/tests/get_viewer_test.go new file mode 100644 index 00000000..ea6898a9 --- /dev/null +++ b/deepsource/tests/get_viewer_test.go @@ -0,0 +1,61 @@ +package tests + +import ( + "context" + "io" + "log" + "os" + "net/http" + "reflect" + "testing" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + userquery "github.com/deepsourcelabs/cli/deepsource/user/queries" + "github.com/deepsourcelabs/graphql" +) + +func TestViewer(t *testing.T) { + t.Run("valid GraphQL request", func(t *testing.T) { + gql := graphql.NewClient("http://localhost:8081/viewer") + c := &testGraphQLClient{client: gql, token: "secret"} + + var gqlClient graphqlclient.GraphQLClient = c + req := userquery.NewViewerRequest(gqlClient) + ctx := context.Background() + _, err := req.Do(ctx) + if err != nil { + t.Error(err.Error()) + } + }) +} + +func mockViewer(w http.ResponseWriter, r *http.Request) { + req, _ := io.ReadAll(r.Body) + + requestBodyData, err := os.ReadFile("./testdata/viewer/request_body.txt") + if err != nil { + log.Println(err) + return + } + + successResponseBodyData, err := os.ReadFile("./testdata/viewer/success_response_body.json") + if err != nil { + log.Println(err) + return + } + + errorResponseBodyData, err := os.ReadFile("./testdata/viewer/error_response_body.json") + if err != nil { + log.Println(err) + return + } + + w.WriteHeader(http.StatusOK) + w.Header().Set("Content-Type", "application/json") + + if reflect.DeepEqual(requestBodyData, req) { + w.Write([]byte(successResponseBodyData)) + } else { + w.Write([]byte(errorResponseBodyData)) + } +} diff --git a/deepsource/tests/init_test.go b/deepsource/tests/init_test.go index 91a0dfab..d74eb8c5 100644 --- a/deepsource/tests/init_test.go +++ b/deepsource/tests/init_test.go @@ -3,9 +3,11 @@ package tests import ( "fmt" "log" + "net" "net/http" "os" "testing" + "time" ) var srv *http.Server @@ -27,6 +29,7 @@ func startMockAPIServer() { } http.HandleFunc("/analyzer", mockAnalyzer) + http.HandleFunc("/viewer", mockViewer) go func() { err 
:= srv.ListenAndServe() @@ -34,4 +37,15 @@ func startMockAPIServer() { panic(fmt.Sprintf("failed to start HTTP mock server with error=%s", err)) } }() + + // Wait for server to be ready + for i := 0; i < 200; i++ { + conn, err := net.Dial("tcp", "localhost:8081") + if err == nil { + conn.Close() + return + } + time.Sleep(10 * time.Millisecond) + } + panic("mock server failed to start within timeout") } diff --git a/deepsource/tests/testdata/viewer/error_response_body.json b/deepsource/tests/testdata/viewer/error_response_body.json new file mode 100644 index 00000000..48fc5801 --- /dev/null +++ b/deepsource/tests/testdata/viewer/error_response_body.json @@ -0,0 +1,4 @@ +{ + "message": "Something went wrong. We are investigating this. Kindly contact support@deepsource.io if the problem persists.", + "documentation_url": "https://deepsource.io/docs/" +} diff --git a/deepsource/tests/testdata/viewer/request_body.txt b/deepsource/tests/testdata/viewer/request_body.txt new file mode 100644 index 00000000..355721bf --- /dev/null +++ b/deepsource/tests/testdata/viewer/request_body.txt @@ -0,0 +1,18 @@ +{ + viewer { + id + firstName + lastName + email + accounts { + edges { + node { + id + login + type + vcsProvider + } + } + } + } +} diff --git a/deepsource/tests/testdata/viewer/success_response_body.json b/deepsource/tests/testdata/viewer/success_response_body.json new file mode 100644 index 00000000..70141464 --- /dev/null +++ b/deepsource/tests/testdata/viewer/success_response_body.json @@ -0,0 +1 @@ +{"data":{"viewer":{"id":"VXNlcjp6a2dkZXk=","firstName":"John","lastName":"Doe","email":"john@example.com","accounts":{"edges":[{"node":{"id":"QWNjb3VudDox","login":"team-alpha","type":"TEAM","vcsProvider":"GITHUB"}},{"node":{"id":"QWNjb3VudDoy","login":"john","type":"INDIVIDUAL","vcsProvider":"GITHUB"}}]}}}} diff --git a/deepsource/transformers/queries/get_transformers.go b/deepsource/transformers/queries/get_transformers.go index b173e0ce..2d91da31 100644 --- a/deepsource/transformers/queries/get_transformers.go +++ b/deepsource/transformers/queries/get_transformers.go @@ -4,8 +4,8 @@ import ( "context" "fmt" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" "github.com/deepsourcelabs/cli/deepsource/transformers" - "github.com/deepsourcelabs/graphql" ) // Query to list supported Transformers @@ -21,7 +21,9 @@ const listTransformersQuery = ` } }` -type TransformersRequest struct{} +type TransformersRequest struct { + client graphqlclient.GraphQLClient +} type TransformersResponse struct { Transformers struct { @@ -34,25 +36,14 @@ type TransformersResponse struct { } `json:"transformers"` } -// GraphQL client interface -type IGQLClient interface { - GQL() *graphql.Client - GetToken() string +func NewTransformersRequest(client graphqlclient.GraphQLClient) *TransformersRequest { + return &TransformersRequest{client: client} } -func (t TransformersRequest) Do(ctx context.Context, client IGQLClient) ([]transformers.Transformer, error) { - req := graphql.NewRequest(listTransformersQuery) - - // set header fields - req.Header.Set("Cache-Control", "no-cache") - // Adding PAT as header for auth - tokenHeader := fmt.Sprintf("Bearer %s", client.GetToken()) - req.Header.Add("Authorization", tokenHeader) - - // run it and capture the response +func (t *TransformersRequest) Do(ctx context.Context) ([]transformers.Transformer, error) { var respData TransformersResponse - if err := client.GQL().Run(ctx, req, &respData); err != nil { - return nil, err + if err := t.client.Query(ctx, listTransformersQuery, 
nil, &respData); err != nil { + return nil, fmt.Errorf("Fetch transformers: %w", err) } // Formatting the query response w.r.t the SDK response ([]transformers.Transformer) diff --git a/deepsource/user/queries/viewer.go b/deepsource/user/queries/viewer.go new file mode 100644 index 00000000..0f2d4682 --- /dev/null +++ b/deepsource/user/queries/viewer.go @@ -0,0 +1,83 @@ +package user + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + dsuser "github.com/deepsourcelabs/cli/deepsource/user" +) + +// GraphQL query to fetch the authenticated user. +const viewerQuery = ` +{ + viewer { + id + firstName + lastName + email + accounts { + edges { + node { + id + login + type + vcsProvider + } + } + } + } +}` + +type ViewerRequest struct { + client graphqlclient.GraphQLClient +} + +type viewerResponse struct { + Viewer struct { + ID string `json:"id"` + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + Email string `json:"email"` + Accounts struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Login string `json:"login"` + Type string `json:"type"` + VCSProvider string `json:"vcsProvider"` + } `json:"node"` + } `json:"edges"` + } `json:"accounts"` + } `json:"viewer"` +} + +func NewViewerRequest(client graphqlclient.GraphQLClient) *ViewerRequest { + return &ViewerRequest{client: client} +} + +func (r *ViewerRequest) Do(ctx context.Context) (*dsuser.User, error) { + var respData viewerResponse + if err := r.client.Query(ctx, viewerQuery, nil, &respData); err != nil { + return nil, fmt.Errorf("Fetch viewer: %w", err) + } + + user := &dsuser.User{ + ID: respData.Viewer.ID, + FirstName: respData.Viewer.FirstName, + LastName: respData.Viewer.LastName, + Email: respData.Viewer.Email, + } + if len(respData.Viewer.Accounts.Edges) > 0 { + user.Accounts = make([]dsuser.Account, 0, len(respData.Viewer.Accounts.Edges)) + for _, edge := range respData.Viewer.Accounts.Edges { + user.Accounts = append(user.Accounts, dsuser.Account{ + ID: edge.Node.ID, + Login: edge.Node.Login, + Type: edge.Node.Type, + VCSProvider: edge.Node.VCSProvider, + }) + } + } + return user, nil +} diff --git a/deepsource/user/user.go b/deepsource/user/user.go new file mode 100644 index 00000000..c2fd1097 --- /dev/null +++ b/deepsource/user/user.go @@ -0,0 +1,18 @@ +package user + +// Account represents a DeepSource account. +type Account struct { + ID string `json:"id"` + Login string `json:"login"` + Type string `json:"type"` + VCSProvider string `json:"vcsProvider"` +} + +// User represents the authenticated user. +type User struct { + ID string `json:"id"` + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + Email string `json:"email"` + Accounts []Account `json:"accounts"` +} diff --git a/deepsource/vulnerabilities/queries/pr.go b/deepsource/vulnerabilities/queries/pr.go new file mode 100644 index 00000000..5110b808 --- /dev/null +++ b/deepsource/vulnerabilities/queries/pr.go @@ -0,0 +1,150 @@ +// Fetches vulnerabilities from a pull request +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/vulnerabilities" +) + +const fetchPRVulnsQuery = `query GetPRVulns( + $name: String! + $owner: String! + $provider: VCSProvider! + $prNumber: Int! + $limit: Int! 
+) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + pullRequest(number: $prNumber) { + number + title + baseBranch + branch + vulnerabilityOccurrences(first: $limit) { + edges { + node { + id + reachability + fixability + vulnerability { + identifier + severity + summary + cvssV3BaseScore + fixedVersions + } + package { + name + ecosystem + } + packageVersion { + version + } + } + } + } + } + } +}` + +type PRVulnsParams struct { + Owner string + RepoName string + Provider string + PRNumber int + Limit int +} + +type PRVulnsRequest struct { + client graphqlclient.GraphQLClient + Params PRVulnsParams +} + +type PRVulnsResponse struct { + Repository struct { + PullRequest struct { + Number int `json:"number"` + Title string `json:"title"` + BaseBranch string `json:"baseBranch"` + Branch string `json:"branch"` + VulnerabilityOccurrences struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Reachability string `json:"reachability"` + Fixability string `json:"fixability"` + Vulnerability struct { + Identifier string `json:"identifier"` + Severity string `json:"severity"` + Summary string `json:"summary"` + CvssV3BaseScore *float64 `json:"cvssV3BaseScore"` + FixedVersions []string `json:"fixedVersions"` + } `json:"vulnerability"` + Package struct { + Name string `json:"name"` + Ecosystem string `json:"ecosystem"` + } `json:"package"` + PackageVersion struct { + Version string `json:"version"` + } `json:"packageVersion"` + } `json:"node"` + } `json:"edges"` + } `json:"vulnerabilityOccurrences"` + } `json:"pullRequest"` + } `json:"repository"` +} + +func NewPRVulnsRequest(client graphqlclient.GraphQLClient, params PRVulnsParams) *PRVulnsRequest { + return &PRVulnsRequest{client: client, Params: params} +} + +func (r *PRVulnsRequest) Do(ctx context.Context) (*vulnerabilities.PRVulns, error) { + vars := map[string]any{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + "prNumber": r.Params.PRNumber, + "limit": r.Params.Limit, + } + var respData PRVulnsResponse + if err := r.client.Query(ctx, fetchPRVulnsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch PR vulnerabilities: %w", err) + } + + pr := respData.Repository.PullRequest + result := &vulnerabilities.PRVulns{ + Number: pr.Number, + Title: pr.Title, + BaseBranch: pr.BaseBranch, + Branch: pr.Branch, + Vulns: make([]vulnerabilities.VulnerabilityOccurrence, 0), + } + + for _, edge := range pr.VulnerabilityOccurrences.Edges { + node := edge.Node + v := vulnerabilities.VulnerabilityOccurrence{ + ID: node.ID, + Reachability: node.Reachability, + Fixability: node.Fixability, + Vulnerability: vulnerabilities.Vulnerability{ + Identifier: node.Vulnerability.Identifier, + Severity: node.Vulnerability.Severity, + Summary: node.Vulnerability.Summary, + CvssV3BaseScore: node.Vulnerability.CvssV3BaseScore, + FixedVersions: node.Vulnerability.FixedVersions, + }, + Package: vulnerabilities.Package{ + Name: node.Package.Name, + Ecosystem: node.Package.Ecosystem, + }, + PackageVersion: vulnerabilities.PackageVersion{ + Version: node.PackageVersion.Version, + }, + } + result.Vulns = append(result.Vulns, v) + } + + return result, nil +} diff --git a/deepsource/vulnerabilities/queries/repo.go b/deepsource/vulnerabilities/queries/repo.go new file mode 100644 index 00000000..2507ac04 --- /dev/null +++ b/deepsource/vulnerabilities/queries/repo.go @@ -0,0 +1,127 @@ +// Fetches vulnerabilities from a repository's default branch +package queries + +import ( + "context" + "fmt" 
+ + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/vulnerabilities" +) + +const fetchRepoVulnsQuery = `query GetRepoVulns( + $name: String! + $owner: String! + $provider: VCSProvider! + $limit: Int! +) { + repository(name: $name, login: $owner, vcsProvider: $provider) { + dependencyVulnerabilityOccurrences(first: $limit) { + edges { + node { + id + reachability + fixability + vulnerability { + identifier + severity + summary + cvssV3BaseScore + fixedVersions + } + package { + name + ecosystem + } + packageVersion { + version + } + } + } + } + } +}` + +type RepoVulnsParams struct { + Owner string + RepoName string + Provider string + Limit int +} + +type RepoVulnsRequest struct { + client graphqlclient.GraphQLClient + Params RepoVulnsParams +} + +type RepoVulnsResponse struct { + Repository struct { + DependencyVulnerabilityOccurrences struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Reachability string `json:"reachability"` + Fixability string `json:"fixability"` + Vulnerability struct { + Identifier string `json:"identifier"` + Severity string `json:"severity"` + Summary string `json:"summary"` + CvssV3BaseScore *float64 `json:"cvssV3BaseScore"` + FixedVersions []string `json:"fixedVersions"` + } `json:"vulnerability"` + Package struct { + Name string `json:"name"` + Ecosystem string `json:"ecosystem"` + } `json:"package"` + PackageVersion struct { + Version string `json:"version"` + } `json:"packageVersion"` + } `json:"node"` + } `json:"edges"` + } `json:"dependencyVulnerabilityOccurrences"` + } `json:"repository"` +} + +func NewRepoVulnsRequest(client graphqlclient.GraphQLClient, params RepoVulnsParams) *RepoVulnsRequest { + return &RepoVulnsRequest{client: client, Params: params} +} + +func (r *RepoVulnsRequest) Do(ctx context.Context) ([]vulnerabilities.VulnerabilityOccurrence, error) { + vars := map[string]any{ + "name": r.Params.RepoName, + "owner": r.Params.Owner, + "provider": r.Params.Provider, + "limit": r.Params.Limit, + } + var respData RepoVulnsResponse + if err := r.client.Query(ctx, fetchRepoVulnsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch repo vulnerabilities: %w", err) + } + + result := make([]vulnerabilities.VulnerabilityOccurrence, 0) + for _, edge := range respData.Repository.DependencyVulnerabilityOccurrences.Edges { + node := edge.Node + v := vulnerabilities.VulnerabilityOccurrence{ + ID: node.ID, + Reachability: node.Reachability, + Fixability: node.Fixability, + Vulnerability: vulnerabilities.Vulnerability{ + Identifier: node.Vulnerability.Identifier, + Severity: node.Vulnerability.Severity, + Summary: node.Vulnerability.Summary, + CvssV3BaseScore: node.Vulnerability.CvssV3BaseScore, + FixedVersions: node.Vulnerability.FixedVersions, + }, + Package: vulnerabilities.Package{ + Name: node.Package.Name, + Ecosystem: node.Package.Ecosystem, + }, + PackageVersion: vulnerabilities.PackageVersion{ + Version: node.PackageVersion.Version, + }, + } + result = append(result, v) + } + + return result, nil +} diff --git a/deepsource/vulnerabilities/queries/run.go b/deepsource/vulnerabilities/queries/run.go new file mode 100644 index 00000000..c7078eee --- /dev/null +++ b/deepsource/vulnerabilities/queries/run.go @@ -0,0 +1,145 @@ +// Fetches vulnerabilities from a specific analysis run +package queries + +import ( + "context" + "fmt" + + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/deepsource/vulnerabilities" +) + +const 
fetchRunVulnsQuery = `query GetRunVulns($commitOid: String!, $limit: Int!) { + run(commitOid: $commitOid) { + commitOid + branchName + status + scaChecks { + edges { + node { + vulnerabilityOccurrences(first: $limit) { + edges { + node { + id + reachability + fixability + vulnerability { + identifier + severity + summary + cvssV3BaseScore + fixedVersions + } + package { + name + ecosystem + } + packageVersion { + version + } + } + } + } + } + } + } + } +}` + +type RunVulnsParams struct { + CommitOid string + Limit int +} + +type RunVulnsRequest struct { + client graphqlclient.GraphQLClient + Params RunVulnsParams +} + +type RunVulnsResponse struct { + Run struct { + CommitOid string `json:"commitOid"` + BranchName string `json:"branchName"` + Status string `json:"status"` + ScaChecks struct { + Edges []struct { + Node struct { + VulnerabilityOccurrences struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Reachability string `json:"reachability"` + Fixability string `json:"fixability"` + Vulnerability struct { + Identifier string `json:"identifier"` + Severity string `json:"severity"` + Summary string `json:"summary"` + CvssV3BaseScore *float64 `json:"cvssV3BaseScore"` + FixedVersions []string `json:"fixedVersions"` + } `json:"vulnerability"` + Package struct { + Name string `json:"name"` + Ecosystem string `json:"ecosystem"` + } `json:"package"` + PackageVersion struct { + Version string `json:"version"` + } `json:"packageVersion"` + } `json:"node"` + } `json:"edges"` + } `json:"vulnerabilityOccurrences"` + } `json:"node"` + } `json:"edges"` + } `json:"scaChecks"` + } `json:"run"` +} + +func NewRunVulnsRequest(client graphqlclient.GraphQLClient, params RunVulnsParams) *RunVulnsRequest { + return &RunVulnsRequest{client: client, Params: params} +} + +func (r *RunVulnsRequest) Do(ctx context.Context) (*vulnerabilities.RunVulns, error) { + vars := map[string]any{ + "commitOid": r.Params.CommitOid, + "limit": r.Params.Limit, + } + var respData RunVulnsResponse + if err := r.client.Query(ctx, fetchRunVulnsQuery, vars, &respData); err != nil { + return nil, fmt.Errorf("Fetch run vulnerabilities: %w", err) + } + + result := &vulnerabilities.RunVulns{ + CommitOid: respData.Run.CommitOid, + BranchName: respData.Run.BranchName, + Status: respData.Run.Status, + Vulns: make([]vulnerabilities.VulnerabilityOccurrence, 0), + } + + // Collect vulns from all SCA checks + for _, checkEdge := range respData.Run.ScaChecks.Edges { + for _, vulnEdge := range checkEdge.Node.VulnerabilityOccurrences.Edges { + node := vulnEdge.Node + v := vulnerabilities.VulnerabilityOccurrence{ + ID: node.ID, + Reachability: node.Reachability, + Fixability: node.Fixability, + Vulnerability: vulnerabilities.Vulnerability{ + Identifier: node.Vulnerability.Identifier, + Severity: node.Vulnerability.Severity, + Summary: node.Vulnerability.Summary, + CvssV3BaseScore: node.Vulnerability.CvssV3BaseScore, + FixedVersions: node.Vulnerability.FixedVersions, + }, + Package: vulnerabilities.Package{ + Name: node.Package.Name, + Ecosystem: node.Package.Ecosystem, + }, + PackageVersion: vulnerabilities.PackageVersion{ + Version: node.PackageVersion.Version, + }, + } + result.Vulns = append(result.Vulns, v) + } + } + + return result, nil +} diff --git a/deepsource/vulnerabilities/types.go b/deepsource/vulnerabilities/types.go new file mode 100644 index 00000000..f9032da0 --- /dev/null +++ b/deepsource/vulnerabilities/types.go @@ -0,0 +1,48 @@ +package vulnerabilities + +// VulnerabilityOccurrence represents a vulnerability 
occurrence +type VulnerabilityOccurrence struct { + ID string `json:"id" yaml:"id"` + Reachability string `json:"reachability" yaml:"reachability"` + Fixability string `json:"fixability" yaml:"fixability"` + Vulnerability Vulnerability `json:"vulnerability" yaml:"vulnerability"` + Package Package `json:"package" yaml:"package"` + PackageVersion PackageVersion `json:"package_version" yaml:"package_version"` +} + +// Vulnerability represents vulnerability details +type Vulnerability struct { + Identifier string `json:"identifier" yaml:"identifier"` + Severity string `json:"severity" yaml:"severity"` + Summary string `json:"summary" yaml:"summary"` + CvssV3BaseScore *float64 `json:"cvss_v3_base_score,omitempty" yaml:"cvss_v3_base_score,omitempty"` + FixedVersions []string `json:"fixed_versions" yaml:"fixed_versions"` +} + +// Package represents a package +type Package struct { + Name string `json:"name" yaml:"name"` + Ecosystem string `json:"ecosystem" yaml:"ecosystem"` +} + +// PackageVersion represents a package version +type PackageVersion struct { + Version string `json:"version" yaml:"version"` +} + +// RunVulns contains vulnerabilities from a specific run +type RunVulns struct { + CommitOid string `json:"commit_oid" yaml:"commit_oid"` + BranchName string `json:"branch_name" yaml:"branch_name"` + Status string `json:"status" yaml:"status"` + Vulns []VulnerabilityOccurrence `json:"vulnerabilities" yaml:"vulnerabilities"` +} + +// PRVulns contains vulnerabilities from a pull request +type PRVulns struct { + Number int `json:"number" yaml:"number"` + Title string `json:"title" yaml:"title"` + BaseBranch string `json:"base_branch" yaml:"base_branch"` + Branch string `json:"branch" yaml:"branch"` + Vulns []VulnerabilityOccurrence `json:"vulnerabilities" yaml:"vulnerabilities"` +} diff --git a/docs/architecture_refactor_phase1.md b/docs/architecture_refactor_phase1.md new file mode 100644 index 00000000..e14d0a27 --- /dev/null +++ b/docs/architecture_refactor_phase1.md @@ -0,0 +1,25 @@ +# Architecture Refactor Phase 1 Notes + +This document summarizes the Phase 1 foundation work for the DeepSource CLI refactor. + +## Interfaces +- `internal/interfaces/` defines contracts for filesystem, environment, git, HTTP, output, and telemetry. +- These interfaces enable dependency injection and isolate external dependencies. + +## Container +- `internal/container` provides production (`New`) and test (`NewTest`) containers. +- The container wires adapters to the interface contracts and holds shared config state. + +## Dual Output System +- `internal/adapters/dual_output.go` writes user output to stdout. +- Diagnostic output is written to stderr and optionally to a debug log file. +- Set `DEEPSOURCE_CLI_DEBUG=1` to enable logging at `~/.deepsource/cli-debug.log`. +- Set `DEEPSOURCE_CLI_DEBUG=/path/to/log` to write diagnostics to a custom file. + +## Context Propagation +- `command.ExecuteContext(ctx)` sets the context on the root command. +- Existing `command.Execute()` now calls `ExecuteContext(context.Background())`. + +## Next Steps +- Add services and adapters as commands are refactored. +- Move command logic into service packages and inject dependencies via the container. 
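
The dual output behaviour is only described in prose in the notes above. A minimal sketch of what `internal/adapters/dual_output.go` might look like under those rules — the `DualOutput` type, its constructor, and its method names are assumptions for illustration; only the stdout/stderr split and the `DEEPSOURCE_CLI_DEBUG` handling come from the notes:

```go
// Hypothetical sketch of the dual output adapter described in the Phase 1 notes.
// Type and method names are assumptions; only the DEEPSOURCE_CLI_DEBUG behaviour
// is taken from the document.
package adapters

import (
	"fmt"
	"io"
	"os"
	"path/filepath"
)

// DualOutput separates user-facing output (stdout) from diagnostic output
// (stderr, plus an optional debug log file).
type DualOutput struct {
	user io.Writer
	diag io.Writer
}

// NewDualOutput wires writers based on DEEPSOURCE_CLI_DEBUG:
//   - unset          -> diagnostics go to stderr only
//   - "1"            -> diagnostics also go to ~/.deepsource/cli-debug.log
//   - any other value -> treated as a path to a custom debug log file
func NewDualOutput() *DualOutput {
	diag := io.Writer(os.Stderr)
	if v := os.Getenv("DEEPSOURCE_CLI_DEBUG"); v != "" {
		logPath := v
		if v == "1" {
			if home, err := os.UserHomeDir(); err == nil {
				logPath = filepath.Join(home, ".deepsource", "cli-debug.log")
			} else {
				logPath = ""
			}
		}
		if logPath != "" {
			if f, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644); err == nil {
				diag = io.MultiWriter(os.Stderr, f)
			}
		}
	}
	return &DualOutput{user: os.Stdout, diag: diag}
}

// UserPrintf writes user output to stdout.
func (d *DualOutput) UserPrintf(format string, args ...any) {
	fmt.Fprintf(d.user, format, args...)
}

// DebugPrintf writes diagnostic output to stderr (and the debug log, if enabled).
func (d *DualOutput) DebugPrintf(format string, args ...any) {
	fmt.Fprintf(d.diag, format, args...)
}
```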
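Likewise, the context propagation bullets can be read as the following sketch, assuming a cobra root command named `rootCmd` in the `command` package (the variable name is an assumption; the `Execute`/`ExecuteContext` relationship is taken from the notes):

```go
package command

import (
	"context"

	"github.com/spf13/cobra"
)

// rootCmd stands in for the CLI's actual root command in this sketch.
var rootCmd = &cobra.Command{Use: "deepsource"}

// ExecuteContext attaches ctx to the root command before running it,
// so subcommands can read it via cmd.Context().
func ExecuteContext(ctx context.Context) error {
	return rootCmd.ExecuteContext(ctx)
}

// Execute keeps the existing entry point; it now delegates to
// ExecuteContext with a background context.
func Execute() error {
	return ExecuteContext(context.Background())
}
```
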
diff --git a/go.mod b/go.mod index a15ae260..9c430349 100644 --- a/go.mod +++ b/go.mod @@ -1,61 +1,46 @@ module github.com/deepsourcelabs/cli -go 1.21 +go 1.25.3 require ( - github.com/AlecAivazis/survey/v2 v2.2.12 - github.com/DataDog/zstd v1.5.5 - github.com/Jeffail/gabs/v2 v2.6.1 + github.com/AlecAivazis/survey/v2 v2.3.7 github.com/MakeNowJust/heredoc v1.0.0 - github.com/cli/browser v1.1.0 + github.com/cli/browser v1.3.0 github.com/deepsourcelabs/graphql v0.2.2 - github.com/fatih/color v1.12.0 - github.com/getsentry/sentry-go v0.6.0 - github.com/google/go-cmp v0.5.5 - github.com/owenrumney/go-sarif/v2 v2.1.0 - github.com/pelletier/go-toml v1.9.2 - github.com/pterm/pterm v0.12.23 - github.com/spf13/cobra v1.5.0 - github.com/spf13/viper v1.7.1 - github.com/stretchr/testify v1.7.0 - github.com/xeipuuv/gojsonschema v1.2.0 + github.com/fatih/color v1.18.0 + github.com/getsentry/sentry-go v0.41.0 + github.com/google/go-cmp v0.6.0 + github.com/klauspost/compress v1.18.3 + github.com/pelletier/go-toml v1.9.5 + github.com/pterm/pterm v0.12.82 + github.com/spf13/cobra v1.10.2 + github.com/stretchr/testify v1.11.1 + gopkg.in/yaml.v3 v3.0.1 ) require ( - github.com/atomicgo/cursor v0.0.1 // indirect + atomicgo.dev/cursor v0.2.0 // indirect + atomicgo.dev/keyboard v0.2.9 // indirect + atomicgo.dev/schedule v0.1.0 // indirect + github.com/clipperhouse/stringish v0.1.1 // indirect + github.com/clipperhouse/uax29/v2 v2.4.0 // indirect + github.com/containerd/console v1.0.5 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/fsnotify/fsnotify v1.4.7 // indirect - github.com/gookit/color v1.4.2 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/gookit/color v1.6.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect - github.com/kr/pretty v0.3.0 // indirect - github.com/magiconair/properties v1.8.1 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/lithammer/fuzzysearch v1.1.8 // indirect github.com/matryer/is v1.4.0 // indirect - github.com/mattn/go-colorable v0.1.8 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect - github.com/mattn/go-runewidth v0.0.13 // indirect - github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect - github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rivo/uniseg v0.2.0 // indirect - github.com/rogpeppe/go-internal v1.8.0 // indirect - github.com/spf13/afero v1.1.2 // indirect - github.com/spf13/cast v1.3.0 // indirect - github.com/spf13/jwalterweatherman v1.0.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/subosito/gotenv v1.2.0 // indirect - github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect - github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 // indirect - golang.org/x/crypto v0.18.0 // indirect - golang.org/x/sys v0.16.0 // indirect - golang.org/x/term v0.16.0 // indirect - golang.org/x/text v0.14.0 // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect - 
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/ini.v1 v1.51.0 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/term v0.39.0 // indirect + golang.org/x/text v0.33.0 // indirect ) diff --git a/go.sum b/go.sum index 353b9568..51fdb1ff 100644 --- a/go.sum +++ b/go.sum @@ -1,526 +1,192 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/AlecAivazis/survey/v2 v2.2.12 h1:5a07y93zA6SZ09gOa9wLVLznF5zTJMQ+pJ3cZK4IuO8= -github.com/AlecAivazis/survey/v2 v2.2.12/go.mod h1:6d4saEvBsfSHXeN1a5OA5m2+HJ2LuVokllnC77pAIKI= -github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/CloudyKit/fastprinter v0.0.0-20170127035650-74b38d55f37a/go.mod h1:EFZQ978U7x8IRnstaskI3IysnWY5Ao3QgZUKOXlsAdw= -github.com/CloudyKit/jet v2.1.3-0.20180809161101-62edd43e4f88+incompatible/go.mod h1:HPYO+50pSWkPoj9Q/eq0aRGByCL6ScRlUmiEX5Zgm+w= -github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= -github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/Jeffail/gabs/v2 v2.6.1 h1:wwbE6nTQTwIMsMxzi6XFQQYRZ6wDc1mSdxoAN+9U4Gk= -github.com/Jeffail/gabs/v2 v2.6.1/go.mod h1:xCn81vdHKxFUuWWAaD5jCTQDNPBMh5pPs9IJ+NcziBI= -github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= -github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7/go.mod h1:6E6s8o2AE4KhCrqr6GRJjdC/gNfTdxkIXvuGZZda2VM= +atomicgo.dev/assert v0.0.2 h1:FiKeMiZSgRrZsPo9qn/7vmr7mCsh5SZyXY4YGYiYwrg= +atomicgo.dev/assert v0.0.2/go.mod h1:ut4NcI3QDdJtlmAxQULOmA13Gz6e2DWbSAS8RUOmNYQ= +atomicgo.dev/cursor v0.2.0 h1:H6XN5alUJ52FZZUkI7AlJbUc1aW38GWZalpYRPpoPOw= +atomicgo.dev/cursor v0.2.0/go.mod h1:Lr4ZJB3U7DfPPOkbH7/6TOtJ4vFGHlgj1nc+n900IpU= +atomicgo.dev/keyboard v0.2.9 h1:tOsIid3nlPLZ3lwgG8KZMp/SFmr7P0ssEN5JUsm78K8= +atomicgo.dev/keyboard v0.2.9/go.mod h1:BC4w9g00XkxH/f1HXhW2sXmJFOCWbKn9xrOunSFtExQ= +atomicgo.dev/schedule v0.1.0 
h1:nTthAbhZS5YZmgYbb2+DH8uQIZcTlIrd4eYr3UQxEjs= +atomicgo.dev/schedule v0.1.0/go.mod h1:xeUa3oAkiuHYh8bKiQBRojqAMq3PXXbJujjb0hw8pEU= +github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= +github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= -github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8 h1:xzYJEypr/85nBpB11F9br+3HUrpgb+fcm5iADzXXYEw= -github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/atomicgo/cursor v0.0.1 h1:xdogsqa6YYlLfM+GyClC/Lchf7aiMerFiZQn7soTOoU= +github.com/MarvinJWendt/testza v0.1.0/go.mod h1:7AxNvlfeHP7Z/hDQ5JtE3OKYT3XFUeLCDE2DQninSqs= +github.com/MarvinJWendt/testza v0.2.1/go.mod h1:God7bhG8n6uQxwdScay+gjm9/LnO4D3kkcZX4hv9Rp8= +github.com/MarvinJWendt/testza v0.2.8/go.mod h1:nwIcjmr0Zz+Rcwfh3/4UhBp7ePKVhuBExvZqnKYWlII= +github.com/MarvinJWendt/testza v0.2.10/go.mod h1:pd+VWsoGUiFtq+hRKSU1Bktnn+DMCSrDrXDpX2bG66k= +github.com/MarvinJWendt/testza v0.2.12/go.mod h1:JOIegYyV7rX+7VZ9r77L/eH6CfJHHzXjB69adAhzZkI= +github.com/MarvinJWendt/testza v0.3.0/go.mod h1:eFcL4I0idjtIx8P9C6KkAuLgATNKpX4/2oUqKc6bF2c= +github.com/MarvinJWendt/testza v0.4.2/go.mod h1:mSdhXiKH8sg/gQehJ63bINcCKp7RtYewEjXsvsVUPbE= +github.com/MarvinJWendt/testza v0.5.2 h1:53KDo64C1z/h/d/stCYCPY69bt/OSwjq5KpFNwi+zB4= +github.com/MarvinJWendt/testza v0.5.2/go.mod h1:xu53QFE5sCdjtMCKk8YMQ2MnymimEctc4n3EjyIYvEY= +github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s= +github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= github.com/atomicgo/cursor v0.0.1/go.mod h1:cBON2QmmrysudxNBFthvMtN32r3jxVRIvzkUiF/RuIk= -github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt 
v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cli/browser v1.1.0 h1:xOZBfkfY9L9vMBgqb1YwRirGu6QFaQ5dP/vXt5ENSOY= -github.com/cli/browser v1.1.0/go.mod h1:HKMQAt9t12kov91Mn7RfZxyJQQgWgyS/3SZswlZ5iTI= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= -github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cli/browser v1.3.0 h1:LejqCrpWr+1pRqmEPDGnTZOjsMe7sehifLynZJuqJpo= +github.com/cli/browser v1.3.0/go.mod h1:HH8s+fOAxjhQoBUAsKuPCbqUuxZDhQ2/aD+SzsEfBTk= +github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= +github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= +github.com/clipperhouse/uax29/v2 v2.4.0 h1:RXqE/l5EiAbA4u97giimKNlmpvkmz+GrBVTelsoXy9g= +github.com/clipperhouse/uax29/v2 v2.4.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= +github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= +github.com/containerd/console v1.0.5 h1:R0ymNeydRqH2DmakFNdmjR2k0t7UPuiOV/N/27/qqsc= +github.com/containerd/console v1.0.5/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI= +github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/deepsourcelabs/graphql v0.2.2 h1:6CtKGvVSIY6Jnf72VyfXB77AaaoHATBzXsrh64irtVQ= github.com/deepsourcelabs/graphql v0.2.2/go.mod h1:2hqi4vS0LxP9wMjbkbMOdR/fap2zwDlzqGGO8WEgyBA= -github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod 
h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= -github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= -github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= -github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA= -github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= -github.com/getsentry/sentry-go v0.6.0 h1:kPd+nr+dlXmaarUBg7xlC/qn+7wyMJL6PMsSn5fA+RM= -github.com/getsentry/sentry-go v0.6.0/go.mod h1:0yZBuzSvbZwBnvaF9VwZIMen3kXscY8/uasKtAX1qG8= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= -github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= -github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= -github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= -github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= -github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/protobuf 
v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gookit/color v1.4.2 h1:tXy44JFSFkKnELV6WaMo/lLfu/meqITX3iAV52do7lk= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/getsentry/sentry-go v0.41.0 h1:q/dQZOlEIb4lhxQSjJhQqtRr3vwrJ6Ahe1C9zv+ryRo= +github.com/getsentry/sentry-go v0.41.0/go.mod h1:eRXCoh3uvmjQLY6qu63BjUZnaBu5L5WhMV1RwYO8W5s= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/gookit/assert v0.1.1 h1:lh3GcawXe/p+cU7ESTZ5Ui3Sm/x8JWpIis4/1aF0mY0= +github.com/gookit/assert v0.1.1/go.mod h1:jS5bmIVQZTIwk42uXl4lyj4iaaxx32tqH16CFj0VX2E= github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= 
-github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ= -github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= -github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= -github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0/go.mod h1:pMCz62A0xJL6I+umB2YTlFRwWXaDFA0jy+5HzGiJjqI= -github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jtolds/gls v4.20.0+incompatible 
h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q= -github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U= -github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/kataras/golog v0.0.9/go.mod h1:12HJgwBIZFNGL0EJnMRhmvGA0PQGx8VFwrZtM4CqbAk= -github.com/kataras/iris/v12 v12.0.1/go.mod h1:udK4vLQKkdDqMGJJVd/msuMtN6hpYJhg/lSzuxjhO+U= -github.com/kataras/neffos v0.0.10/go.mod h1:ZYmJC07hQPW67eKuzlfY7SO3bC0mw83A3j6im82hfqw= -github.com/kataras/pio v0.0.0-20190103105442-ea782b38602d/go.mod h1:NV88laa9UiiDuX9AhMbDPkGYSPugBOV6yTZB1l2K9Z0= +github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo= +github.com/gookit/color v1.6.0 h1:JjJXBTk1ETNyqyilJhkTXJYYigHG24TM9Xa2M1xAhRA= +github.com/gookit/color v1.6.0/go.mod h1:9ACFc7/1IpHGBW8RwuDm/0YEnhg3dwwXpoMsmtyHfjs= +github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog= +github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.9.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw= +github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.10/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= +github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= +github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= +github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 
h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.4 h1:5Myjjh3JY/NaAi4IsUbHADytDyl1VE1Y9PXDlL+P/VQ= -github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= -github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= -github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/lithammer/fuzzysearch v1.1.8 h1:/HIuJnjHuXS8bKaiTMeeDlW2/AyIWk2brx1V8LFgLN4= +github.com/lithammer/fuzzysearch v1.1.8/go.mod h1:IdqeyBClc3FFqSzYq/MXESsS4S0FsZ5ajtkr5xPLts4= github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mediocregopher/mediocre-go-lib v0.0.0-20181029021733-cb65787f37ed/go.mod h1:dSsfyI2zABAdhcbvkXqgxOxrCsbYeHCPgrZkku60dSg= -github.com/mediocregopher/radix/v3 v3.3.0/go.mod h1:EmfVyvspXz1uZEyPBMyGK+kjWiKQGvsUt6O3Pj+LDCQ= -github.com/mgutz/ansi 
v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= -github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nats-io/nats.go v1.8.1/go.mod h1:BrFz9vVn0fU3AcH9Vn4Kd7W0NpJ651tD5omQ3M8LwxM= -github.com/nats-io/nkeys v0.0.2/go.mod h1:dab7URMsZm6Z/jp9Z5UGa87Uutgc2mVpXLC4B7TDb/4= -github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= -github.com/owenrumney/go-sarif/v2 v2.1.0 h1:Xp4RUbbQc5O2/t5YYZ+1QzJ8FZVdJwXeiWssiF5vQrA= -github.com/owenrumney/go-sarif/v2 v2.1.0/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.9.2 h1:7NiByeVF4jKSG1lDF3X8LTIkq2/bu+1uYbIm1eS5tzk= -github.com/pelletier/go-toml v1.9.2/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= +github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= 
+github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/pterm/pterm v0.12.23 h1:+PL0YqmmT0QiDLOpevE3e2HPb5UIDBxh6OlLm8jDhxg= -github.com/pterm/pterm v0.12.23/go.mod h1:PhQ89w4i95rhgE+xedAoqous6K9X+r6aSOI2eFF7DZI= -github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/pterm/pterm v0.12.27/go.mod h1:PhQ89w4i95rhgE+xedAoqous6K9X+r6aSOI2eFF7DZI= +github.com/pterm/pterm v0.12.29/go.mod h1:WI3qxgvoQFFGKGjGnJR849gU0TsEOvKn5Q8LlY1U7lg= +github.com/pterm/pterm v0.12.30/go.mod h1:MOqLIyMOgmTDz9yorcYbcw+HsgoZo3BQfg2wtl3HEFE= +github.com/pterm/pterm v0.12.31/go.mod h1:32ZAWZVXD7ZfG0s8qqHXePte42kdz8ECtRyEejaWgXU= +github.com/pterm/pterm v0.12.33/go.mod h1:x+h2uL+n7CP/rel9+bImHD5lF3nM9vJj80k9ybiiTTE= +github.com/pterm/pterm v0.12.36/go.mod h1:NjiL09hFhT/vWjQHSj1athJpx6H8cjpHXNAK5bUw8T8= +github.com/pterm/pterm v0.12.40/go.mod h1:ffwPLwlbXxP+rxT0GsgDTzS3y3rmpAO1NMjUkGTYf8s= +github.com/pterm/pterm v0.12.82 h1:+D9wYhCaeaK0FIQoZtqbNQuNpe2lB2tajKKsTd5paVQ= +github.com/pterm/pterm v0.12.82/go.mod h1:TyuyrPjnxfwP+ccJdBTeWHtd/e0ybQHkOS/TakajZCw= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= -github.com/rogpeppe/go-internal v1.8.0/go.mod 
h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= -github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= -github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.7.1 h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk= -github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= 
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= -github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= -github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= -github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 h1:QldyIu/L63oPpyvQmHgvgickp1Yw510KJOqX7H24mg8= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod 
h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= -github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= -github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= -github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= -go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint 
v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561 h1:MDc5xs78ZrZr3HMQugiXOAkSZtfTpbJLDr/lwfgO53E= +golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 
v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210319071255-635bc2c9138d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= -golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= +golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= -gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= -gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= -gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/goreleaser.yaml b/goreleaser.yaml deleted file mode 100644 index adb1b6b5..00000000 --- a/goreleaser.yaml +++ /dev/null @@ -1,136 +0,0 @@ -project_name: deepsource - -before: - hooks: - - scripts/gen-completions.sh - -builds: - # darwin-amd64 - - id: deepsource-darwin-amd64 - main: ./cmd/deepsource - env: - - CGO_ENABLED=1 - - CC=o64-clang - - CXX=o64-clang++ - flags: - - -tags=static_all - goos: - - darwin - goarch: - - amd64 - ldflags: - - "-X 'main.version={{ .Version }}' -X 'main.SentryDSN={{ .Env.DEEPSOURCE_CLI_SENTRY_DSN }}'" - - # darwin-arm64 - - id: deepsource-darwin-arm64 - main: ./cmd/deepsource - env: - - CGO_ENABLED=1 - - CC=o64-clang - - CXX=o64-clang++ - flags: - - -tags=static_all - goos: - - darwin - goarch: - - arm64 - ldflags: - - "-X 'main.version={{ .Version }}' -X 'main.SentryDSN={{ .Env.DEEPSOURCE_CLI_SENTRY_DSN }}'" - - # linux-amd64 - - id: deepsource-linux-amd64 - main: ./cmd/deepsource - env: - - CGO_ENABLED=1 - - CC=x86_64-linux-gnu-gcc - - CXX=x86_64-linux-gnu-g++ - flags: - - -tags=static_all - goos: - - linux - goarch: - - amd64 - ldflags: - - "-X 'main.version={{ .Version }}' -X 'main.SentryDSN={{ .Env.DEEPSOURCE_CLI_SENTRY_DSN }}'" - - # linux-arm64 - - id: deepsource-linux-arm64 - main: ./cmd/deepsource - env: - - CGO_ENABLED=1 - - CC=aarch64-linux-gnu-gcc - - CXX=aarch64-linux-gnu-g++ - flags: - - -tags=static_all - goos: - - linux - goarch: - - arm64 - ldflags: - - "-X 'main.version={{ .Version }}' -X 'main.SentryDSN={{ .Env.DEEPSOURCE_CLI_SENTRY_DSN }}'" - - # windows-amd64 - - id: "windows-deepsource" - main: ./cmd/deepsource - env: - - CGO_ENABLED=1 - - CC=x86_64-w64-mingw32-gcc - - CXX=x86_64-w64-mingw32-g++ - flags: - - -tags=static_all - goos: - - windows - goarch: - - amd64 - ldflags: - - buildmode=exe - - "-X 'main.version={{ .Version }}' -X 'main.SentryDSN={{ .Env.DEEPSOURCE_CLI_SENTRY_DSN }}'" - -archives: - - id: arch_rename - builds: - - deepsource-darwin-amd64 - - deepsource-linux-amd64 - - windows-deepsource - name_template: >- - deepsource_{{ .Version }}_{{ .Os }}_ - {{- if eq .Arch "amd64" }}x86_64 - {{- else if eq .Arch "386" }}i386 - {{- else }}{{ .Arch }}{{ end }} - files: - - completions/* - - id: default - name_template: >- - deepsource_{{ .Version }}_{{ .Os }}_{{ .Arch }} - files: - - completions/* -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ .Tag }}-next" -changelog: - sort: asc - filters: - exclude: - - '^tests:' - -brews: - - tap: - owner: DeepSourceCorp - name: homebrew-cli - branch: cli-release - token: "{{ .Env.HOMEBREW_TOKEN }}" - ids: - - arch_rename - commit_author: - name: 
deepsourcebot - email: bot@deepsource.io - homepage: "https://github.com/deepsourcelabs/cli" - description: "Command line interface to DeepSource" - license: "BSD 2-Clause Simplified License" - install: | - bin.install "deepsource" - bash_completion.install "completions/deepsource.bash" => "deepsource" - zsh_completion.install "completions/deepsource.zsh" => "_deepsource" - fish_completion.install "completions/deepsource.fish" - skip_upload: auto diff --git a/internal/adapters/dual_output.go b/internal/adapters/dual_output.go new file mode 100644 index 00000000..1709b061 --- /dev/null +++ b/internal/adapters/dual_output.go @@ -0,0 +1,33 @@ +package adapters + +import ( + "fmt" + "io" + "os" +) + +// DualOutput writes user output to stdout and diagnostics to stderr (and optional log file). +type DualOutput struct { + user io.Writer + err io.Writer +} + +func NewDualOutput() *DualOutput { + return &DualOutput{user: os.Stdout, err: os.Stderr} +} + +func (o *DualOutput) Write(p []byte) (n int, err error) { + return o.user.Write(p) +} + +func (o *DualOutput) WriteError(p []byte) (n int, err error) { + return o.err.Write(p) +} + +func (o *DualOutput) Printf(format string, a ...interface{}) { + fmt.Fprintf(o.user, format, a...) +} + +func (o *DualOutput) Errorf(format string, a ...interface{}) { + fmt.Fprintf(o.err, format, a...) +} diff --git a/internal/adapters/env.go b/internal/adapters/env.go new file mode 100644 index 00000000..af7f986b --- /dev/null +++ b/internal/adapters/env.go @@ -0,0 +1,54 @@ +package adapters + +import ( + "os" + "sync" +) + +// RealEnvironment proxies to os environment variables. +type RealEnvironment struct{} + +func NewRealEnvironment() *RealEnvironment { + return &RealEnvironment{} +} + +func (e *RealEnvironment) Get(key string) string { + return os.Getenv(key) +} + +func (e *RealEnvironment) Set(key string, value string) error { + return os.Setenv(key, value) +} + +func (e *RealEnvironment) Lookup(key string) (string, bool) { + return os.LookupEnv(key) +} + +// MockEnvironment stores environment variables in memory. +type MockEnvironment struct { + mu sync.RWMutex + data map[string]string +} + +func NewMockEnvironment() *MockEnvironment { + return &MockEnvironment{data: make(map[string]string)} +} + +func (e *MockEnvironment) Get(key string) string { + value, _ := e.Lookup(key) + return value +} + +func (e *MockEnvironment) Set(key string, value string) error { + e.mu.Lock() + defer e.mu.Unlock() + e.data[key] = value + return nil +} + +func (e *MockEnvironment) Lookup(key string) (string, bool) { + e.mu.RLock() + defer e.mu.RUnlock() + value, ok := e.data[key] + return value, ok +} diff --git a/internal/adapters/filesystem.go b/internal/adapters/filesystem.go new file mode 100644 index 00000000..2ea4a71a --- /dev/null +++ b/internal/adapters/filesystem.go @@ -0,0 +1,30 @@ +package adapters + +import "os" + +// OSFileSystem uses the host filesystem. 
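+// It satisfies the interfaces.FileSystem contract wired in by the dependency
+// container; an illustrative call (the path is supplied by the caller):
+//
+//	fs := NewOSFileSystem()
+//	data, err := fs.ReadFile(path)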
+type OSFileSystem struct{} + +func NewOSFileSystem() *OSFileSystem { + return &OSFileSystem{} +} + +func (fs *OSFileSystem) ReadFile(path string) ([]byte, error) { + return os.ReadFile(path) +} + +func (fs *OSFileSystem) WriteFile(path string, data []byte, perm os.FileMode) error { + return os.WriteFile(path, data, perm) +} + +func (fs *OSFileSystem) Stat(path string) (os.FileInfo, error) { + return os.Stat(path) +} + +func (fs *OSFileSystem) MkdirAll(path string, perm os.FileMode) error { + return os.MkdirAll(path, perm) +} + +func (fs *OSFileSystem) Remove(path string) error { + return os.Remove(path) +} diff --git a/internal/adapters/git.go b/internal/adapters/git.go new file mode 100644 index 00000000..b3a0cf32 --- /dev/null +++ b/internal/adapters/git.go @@ -0,0 +1,139 @@ +package adapters + +import ( + "bytes" + "os" + "os/exec" + "strings" + + "github.com/deepsourcelabs/cli/internal/interfaces" + "github.com/deepsourcelabs/cli/internal/vcs" +) + +// RealGitClient provides git operations using the host environment. +type RealGitClient struct{} + +func NewRealGitClient() *RealGitClient { + return &RealGitClient{} +} + +func (g *RealGitClient) GetHead(workspaceDir string) (string, string, error) { + return gitGetHead(workspaceDir) +} + +func (g *RealGitClient) ListRemotes(dir string) (map[string]interfaces.RemoteInfo, error) { + remotes, err := vcs.ListRemotes() + if err != nil { + return nil, err + } + + result := make(map[string]interfaces.RemoteInfo, len(remotes)) + for name, fields := range remotes { + info := interfaces.RemoteInfo{} + if len(fields) > 0 { + info.Owner = fields[0] + } + if len(fields) > 1 { + info.RepoName = fields[1] + } + if len(fields) > 2 { + info.Provider = fields[2] + } + if len(fields) > 3 { + info.DisplayName = fields[3] + } + result[name] = info + } + + return result, nil +} + +// gitGetHead accepts a git directory and returns head commit OID / error. +func gitGetHead(workspaceDir string) (headOID string, warning string, err error) { + // Check if DeepSource's Test coverage action triggered this first before executing any git commands. + headOID, err = getTestCoverageActionCommit() + if headOID != "" { + return + } + + // Check if the `GIT_COMMIT_SHA` environment variable exists. If yes, return this as + // the latest commit sha. + if injectedSHA, isManuallyInjectedSHA := os.LookupEnv("GIT_COMMIT_SHA"); isManuallyInjectedSHA { + return injectedSHA, "", nil + } + + // Get the top commit manually, using git command. + headOID, err = fetchHeadManually(workspaceDir) + if err != nil { + return + } + + // TRAVIS CI + if envUser := os.Getenv("USER"); envUser == "travis" { + headOID, warning, err = getTravisCommit(headOID) + return + } + + // GITHUB ACTIONS + if _, isGitHubEnv := os.LookupEnv("GITHUB_ACTIONS"); isGitHubEnv { + headOID, warning, err = getGitHubActionsCommit(headOID) + return + } + + // If we are here, it means there weren't any special cases. + return +} + +// Fetches the latest commit hash using the command `git rev-parse HEAD`. +func fetchHeadManually(directoryPath string) (string, error) { + cmd := exec.Command("git", "--no-pager", "rev-parse", "HEAD") + cmd.Dir = directoryPath + + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + err := cmd.Run() + outStr, _ := stdout.String(), stderr.String() + if err != nil { + return "", err + } + + // Trim newline suffix from Commit OID. + return strings.TrimSuffix(outStr, "\n"), nil +} + +// Handle special cases for GitHub Actions. 
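+// When GITHUB_REF is unset, GITHUB_SHA is returned directly. For pull_request
+// events where the checked-out HEAD equals GITHUB_SHA, that HEAD is a merge
+// commit created by the checkout step, so a warning is attached to the result.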
+func getGitHubActionsCommit(topCommit string) (headOID string, warning string, err error) { + // When GITHUB_REF is not set, GITHUB_SHA points to original commit. + if _, isRefPresent := os.LookupEnv("GITHUB_REF"); !isRefPresent { + headOID = os.Getenv("GITHUB_SHA") + return + } + + // Detect merge commit made by GitHub Actions for pull_request events. + eventName := os.Getenv("GITHUB_EVENT_NAME") + eventCommitSha := os.Getenv("GITHUB_SHA") + if strings.HasPrefix(eventName, "pull_request") && topCommit == eventCommitSha { + warning = "Warning: Looks like the checkout step is making a merge commit. " + + "Test coverage Analyzer would not run for the reported artifact because the merge commit doesn't exist upstream.\n" + + "Please refer to the docs for required changes. Ref: https://docs.deepsource.com/docs/analyzers-test-coverage#with-github-actions" + } + headOID = topCommit + return +} + +// Return PR's HEAD ref set as env variable manually by DeepSource's Test coverage action. +func getTestCoverageActionCommit() (headOID string, err error) { + headOID = os.Getenv("GHA_HEAD_COMMIT_SHA") + return +} + +// Handle special case for TravisCI. +func getTravisCommit(topCommit string) (string, string, error) { + if prSHA := os.Getenv("TRAVIS_PULL_REQUEST_SHA"); len(prSHA) > 0 { + return prSHA, "", nil + } + + return topCommit, "", nil +} diff --git a/internal/adapters/git_mock.go b/internal/adapters/git_mock.go new file mode 100644 index 00000000..bb045031 --- /dev/null +++ b/internal/adapters/git_mock.go @@ -0,0 +1,37 @@ +package adapters + +import "github.com/deepsourcelabs/cli/internal/interfaces" + +// MockGitClient is a configurable git client for tests. +type MockGitClient struct { + HeadOID string + Warning string + Err error + Remotes map[string]interfaces.RemoteInfo +} + +func NewMockGitClient() *MockGitClient { + return &MockGitClient{Remotes: make(map[string]interfaces.RemoteInfo)} +} + +func (g *MockGitClient) SetHead(oid string, warning string) { + g.HeadOID = oid + g.Warning = warning + g.Err = nil +} + +func (g *MockGitClient) SetError(err error) { + g.Err = err +} + +func (g *MockGitClient) SetRemotes(remotes map[string]interfaces.RemoteInfo) { + g.Remotes = remotes +} + +func (g *MockGitClient) GetHead(dir string) (string, string, error) { + return g.HeadOID, g.Warning, g.Err +} + +func (g *MockGitClient) ListRemotes(dir string) (map[string]interfaces.RemoteInfo, error) { + return g.Remotes, g.Err +} diff --git a/internal/adapters/http.go b/internal/adapters/http.go new file mode 100644 index 00000000..e9eb956f --- /dev/null +++ b/internal/adapters/http.go @@ -0,0 +1,11 @@ +package adapters + +import ( + "net/http" + "time" +) + +// NewHTTPClient returns a configured HTTP client. +func NewHTTPClient(timeout time.Duration) *http.Client { + return &http.Client{Timeout: timeout} +} diff --git a/internal/adapters/http_mock.go b/internal/adapters/http_mock.go new file mode 100644 index 00000000..6a3b6b33 --- /dev/null +++ b/internal/adapters/http_mock.go @@ -0,0 +1,22 @@ +package adapters + +import ( + "errors" + "net/http" +) + +// MockHTTPClient is a configurable HTTP client for tests. 
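+// Tests stub responses by assigning DoFunc, for example:
+//
+//	client := NewMockHTTPClient()
+//	client.DoFunc = func(req *http.Request) (*http.Response, error) {
+//		return &http.Response{StatusCode: http.StatusOK, Body: http.NoBody}, nil
+//	}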
+type MockHTTPClient struct { + DoFunc func(req *http.Request) (*http.Response, error) +} + +func NewMockHTTPClient() *MockHTTPClient { + return &MockHTTPClient{} +} + +func (m *MockHTTPClient) Do(req *http.Request) (*http.Response, error) { + if m.DoFunc == nil { + return nil, errors.New("mock http client: no DoFunc set") + } + return m.DoFunc(req) +} diff --git a/internal/adapters/output.go b/internal/adapters/output.go new file mode 100644 index 00000000..f2fc0f06 --- /dev/null +++ b/internal/adapters/output.go @@ -0,0 +1,68 @@ +package adapters + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// StdOutput writes user-facing and error output to stdout/stderr. +type StdOutput struct { + Out io.Writer + Err io.Writer +} + +func NewStdOutput() *StdOutput { + return &StdOutput{Out: os.Stdout, Err: os.Stderr} +} + +func (o *StdOutput) Write(p []byte) (n int, err error) { + return o.Out.Write(p) +} + +func (o *StdOutput) WriteError(p []byte) (n int, err error) { + return o.Err.Write(p) +} + +func (o *StdOutput) Printf(format string, a ...interface{}) { + fmt.Fprintf(o.Out, format, a...) +} + +func (o *StdOutput) Errorf(format string, a ...interface{}) { + fmt.Fprintf(o.Err, format, a...) +} + +// BufferOutput captures output in memory for tests. +type BufferOutput struct { + Out bytes.Buffer + Err bytes.Buffer +} + +func NewBufferOutput() *BufferOutput { + return &BufferOutput{} +} + +func (o *BufferOutput) Write(p []byte) (n int, err error) { + return o.Out.Write(p) +} + +func (o *BufferOutput) WriteError(p []byte) (n int, err error) { + return o.Err.Write(p) +} + +func (o *BufferOutput) Printf(format string, a ...interface{}) { + fmt.Fprintf(&o.Out, format, a...) +} + +func (o *BufferOutput) Errorf(format string, a ...interface{}) { + fmt.Fprintf(&o.Err, format, a...) +} + +func (o *BufferOutput) String() string { + return o.Out.String() +} + +func (o *BufferOutput) ErrorString() string { + return o.Err.String() +} diff --git a/internal/adapters/sentry.go b/internal/adapters/sentry.go new file mode 100644 index 00000000..a74935df --- /dev/null +++ b/internal/adapters/sentry.go @@ -0,0 +1,36 @@ +package adapters + +import ( + "time" + + "github.com/getsentry/sentry-go" +) + +// SentryClient reports errors via sentry-go. +type SentryClient struct{} + +func NewSentryClient() *SentryClient { + return &SentryClient{} +} + +func (s *SentryClient) Init(dsn string) error { + return sentry.Init(sentry.ClientOptions{Dsn: dsn}) +} + +func (s *SentryClient) CaptureException(err error) { + sentry.CaptureException(err) +} + +func (s *SentryClient) CaptureMessage(msg string) { + sentry.CaptureMessage(msg) +} + +func (s *SentryClient) ConfigureScope(f func(scope interface{})) { + sentry.ConfigureScope(func(scope *sentry.Scope) { + f(scope) + }) +} + +func (s *SentryClient) Flush(timeout time.Duration) { + sentry.Flush(timeout) +} diff --git a/internal/adapters/sentry_noop.go b/internal/adapters/sentry_noop.go new file mode 100644 index 00000000..3c40aaf7 --- /dev/null +++ b/internal/adapters/sentry_noop.go @@ -0,0 +1,22 @@ +package adapters + +import "time" + +// NoOpSentry is a stub sentry client for tests. 
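+// Every method is a no-op, so error-reporting code paths can run in tests
+// without initializing Sentry or making network calls.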
+type NoOpSentry struct{} + +func NewNoOpSentry() *NoOpSentry { + return &NoOpSentry{} +} + +func (n *NoOpSentry) Init(dsn string) error { + return nil +} + +func (n *NoOpSentry) CaptureException(err error) {} + +func (n *NoOpSentry) CaptureMessage(msg string) {} + +func (n *NoOpSentry) ConfigureScope(f func(scope interface{})) {} + +func (n *NoOpSentry) Flush(timeout time.Duration) {} diff --git a/utils/cmd_validator.go b/internal/cli/args/args.go similarity index 99% rename from utils/cmd_validator.go rename to internal/cli/args/args.go index 856216a3..be32598b 100644 --- a/utils/cmd_validator.go +++ b/internal/cli/args/args.go @@ -1,4 +1,4 @@ -package utils +package args import ( "errors" diff --git a/internal/cli/completion/completion.go b/internal/cli/completion/completion.go new file mode 100644 index 00000000..c8ff295a --- /dev/null +++ b/internal/cli/completion/completion.go @@ -0,0 +1,51 @@ +package completion + +import ( + "fmt" + "sort" + + "github.com/deepsourcelabs/cli/internal/vcs" +) + +// RepoCompletionCandidates returns repo args derived from git remotes for shell completion. +func RepoCompletionCandidates() []string { + remotes, err := vcs.ListRemotes() + if err != nil { + return nil + } + + candidates := make([]string, 0, len(remotes)) + for remoteName, data := range remotes { + if len(data) < 3 { + continue + } + shortcode, label := repoProviderInfo(data[2]) + if shortcode == "" { + continue + } + candidate := fmt.Sprintf("%s/%s/%s\t%s (%s)", shortcode, data[0], data[1], remoteName, label) + candidates = append(candidates, candidate) + } + + sort.Strings(candidates) + return candidates +} + +func repoProviderInfo(provider string) (string, string) { + switch provider { + case "GITHUB": + return "gh", "github" + case "GITHUB_ENTERPRISE": + return "ghe", "github enterprise" + case "GITLAB": + return "gl", "gitlab" + case "BITBUCKET": + return "bb", "bitbucket" + case "BITBUCKET_DATACENTER": + return "bbdc", "bitbucket datacenter" + case "ADS": + return "ads", "ads" + default: + return "", "" + } +} diff --git a/utils/prompt.go b/internal/cli/prompt/prompt.go similarity index 95% rename from utils/prompt.go rename to internal/cli/prompt/prompt.go index 976cc059..868bf30c 100644 --- a/utils/prompt.go +++ b/internal/cli/prompt/prompt.go @@ -1,4 +1,4 @@ -package utils +package prompt import ( "errors" @@ -31,8 +31,8 @@ func ConfirmFromUser(msg, helpText string) (bool, error) { // Used for Single Option Selection from Multiple Options // Being used for selecting Java version for configuring meta of Java analyzer // > * 1 -// * 2 -// * 3 +// - 2 +// - 3 func SelectFromOptions(msg, helpText string, opts []string) (string, error) { var result string prompt := &survey.Select{ @@ -71,10 +71,11 @@ func GetSingleLineInput(msg, helpText string) (string, error) { // Example: // ? Which languages/tools does your project use? 
// > [ ] Shell -// [ ] Rust -// [ ] Test Coverage -// [ ] Python -// [ ] Go +// +// [ ] Rust +// [ ] Test Coverage +// [ ] Python +// [ ] Go func SelectFromMultipleOptions(msg, helpText string, options []string) ([]string, error) { response := make([]string, 0) // Extracting languages and tools being used in the project for Analyzers diff --git a/utils/colors.go b/internal/cli/style/style.go similarity index 95% rename from utils/colors.go rename to internal/cli/style/style.go index 70f92a62..df5db7b9 100644 --- a/utils/colors.go +++ b/internal/cli/style/style.go @@ -1,4 +1,4 @@ -package utils +package style import "github.com/fatih/color" diff --git a/internal/container/container.go b/internal/container/container.go new file mode 100644 index 00000000..484233c5 --- /dev/null +++ b/internal/container/container.go @@ -0,0 +1,33 @@ +package container + +import ( + "time" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/interfaces" +) + +// Container holds application dependencies. +type Container struct { + FileSystem interfaces.FileSystem + Environment interfaces.Environment + GitClient interfaces.GitClient + HTTPClient interfaces.HTTPClient + Output interfaces.OutputWriter + Sentry interfaces.SentryClient + Config *config.Manager +} + +// New creates a production dependency container. +func New() *Container { + return &Container{ + FileSystem: adapters.NewOSFileSystem(), + Environment: adapters.NewRealEnvironment(), + GitClient: adapters.NewRealGitClient(), + HTTPClient: adapters.NewHTTPClient(60 * time.Second), + Output: adapters.NewDualOutput(), + Sentry: adapters.NewSentryClient(), + Config: config.DefaultManager(), + } +} diff --git a/internal/container/test_container.go b/internal/container/test_container.go new file mode 100644 index 00000000..442ad6f0 --- /dev/null +++ b/internal/container/test_container.go @@ -0,0 +1,30 @@ +package container + +import ( + "os" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/interfaces" +) + +// NewTest creates a dependency container with test-friendly implementations. +func NewTest() *Container { + return &Container{ + FileSystem: adapters.NewOSFileSystem(), + Environment: adapters.NewMockEnvironment(), + GitClient: adapters.NewMockGitClient(), + HTTPClient: adapters.NewMockHTTPClient(), + Output: adapters.NewBufferOutput(), + Sentry: adapters.NewNoOpSentry(), + Config: config.NewManager(adapters.NewOSFileSystem(), os.UserHomeDir), + } +} + +// Compile-time interface checks. +var _ interfaces.FileSystem = (*adapters.OSFileSystem)(nil) +var _ interfaces.Environment = (*adapters.RealEnvironment)(nil) +var _ interfaces.GitClient = (*adapters.RealGitClient)(nil) +var _ interfaces.HTTPClient = (*adapters.MockHTTPClient)(nil) +var _ interfaces.OutputWriter = (*adapters.StdOutput)(nil) +var _ interfaces.SentryClient = (*adapters.SentryClient)(nil) diff --git a/internal/errors/errors.go b/internal/errors/errors.go new file mode 100644 index 00000000..98b169e2 --- /dev/null +++ b/internal/errors/errors.go @@ -0,0 +1,95 @@ +package errors + +import ( + stderrors "errors" + "fmt" +) + +// ErrorCode represents a CLI error category. +type ErrorCode int + +const ( + ErrInvalidConfig ErrorCode = iota + 1 + ErrAuthRequired + ErrAuthExpired + ErrNetworkFailure + ErrGitOperationFailed + ErrInvalidDSN + ErrInvalidArtifact + ErrAPIError +) + +// CLIError wraps an error with a code and message. 
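+// An illustrative construction:
+//
+//	return NewCLIError(ErrNetworkFailure, "could not reach the DeepSource API", err)
+//
+// Unwrap exposes the cause, so errors.Is and errors.As work across the chain.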
+type CLIError struct { + Code ErrorCode + Message string + Cause error +} + +func (e *CLIError) Error() string { + if e.Cause != nil { + return fmt.Sprintf("%s: %v", e.Message, e.Cause) + } + return e.Message +} + +func (e *CLIError) ExitCode() int { + return int(e.Code) +} + +func (e *CLIError) Unwrap() error { + return e.Cause +} + +func NewCLIError(code ErrorCode, message string, cause error) *CLIError { + return &CLIError{ + Code: code, + Message: message, + Cause: cause, + } +} + +// UserError marks an error as user-correctable (bad input, missing config, etc.). +// These errors should not be reported to Sentry. +type UserError struct { + Err error +} + +func (e *UserError) Error() string { + return e.Err.Error() +} + +func (e *UserError) Unwrap() error { + return e.Err +} + +func NewUserError(err error) error { + return &UserError{Err: err} +} + +func NewUserErrorf(format string, args ...interface{}) error { + return &UserError{Err: fmt.Errorf(format, args...)} +} + +// IsUserError returns true if the error chain contains a UserError or a CLIError +// with a user-error code. +func IsUserError(err error) bool { + var ue *UserError + if stderrors.As(err, &ue) { + return true + } + var ce *CLIError + if stderrors.As(err, &ce) { + return ce.Code.IsUserErrorCode() + } + return false +} + +// IsUserErrorCode returns true for error codes that represent user-correctable problems. +func (c ErrorCode) IsUserErrorCode() bool { + switch c { + case ErrInvalidConfig, ErrAuthRequired, ErrAuthExpired, ErrInvalidDSN, ErrInvalidArtifact: + return true + } + return false +} diff --git a/internal/interfaces/http.go b/internal/interfaces/http.go new file mode 100644 index 00000000..a92655bf --- /dev/null +++ b/internal/interfaces/http.go @@ -0,0 +1,8 @@ +package interfaces + +import "net/http" + +// HTTPClient abstracts http.Client for dependency injection. +type HTTPClient interface { + Do(req *http.Request) (*http.Response, error) +} diff --git a/internal/interfaces/output.go b/internal/interfaces/output.go new file mode 100644 index 00000000..9b5b1bd0 --- /dev/null +++ b/internal/interfaces/output.go @@ -0,0 +1,9 @@ +package interfaces + +// OutputWriter handles user-facing and diagnostic output. +type OutputWriter interface { + Write(p []byte) (n int, err error) + WriteError(p []byte) (n int, err error) + Printf(format string, a ...interface{}) + Errorf(format string, a ...interface{}) +} diff --git a/internal/interfaces/sentry.go b/internal/interfaces/sentry.go new file mode 100644 index 00000000..0e116eda --- /dev/null +++ b/internal/interfaces/sentry.go @@ -0,0 +1,12 @@ +package interfaces + +import "time" + +// SentryClient captures errors and events for diagnostics. +type SentryClient interface { + Init(dsn string) error + CaptureException(err error) + CaptureMessage(msg string) + ConfigureScope(f func(scope interface{})) + Flush(timeout time.Duration) +} diff --git a/internal/interfaces/system.go b/internal/interfaces/system.go new file mode 100644 index 00000000..68fca2da --- /dev/null +++ b/internal/interfaces/system.go @@ -0,0 +1,33 @@ +package interfaces + +import "os" + +// RemoteInfo describes a parsed git remote. +type RemoteInfo struct { + Owner string + RepoName string + Provider string + DisplayName string +} + +// FileSystem defines filesystem operations used by the CLI. 
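+// adapters.OSFileSystem is the production implementation; the dependency
+// container wires it in by default.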
+type FileSystem interface { + ReadFile(path string) ([]byte, error) + WriteFile(path string, data []byte, perm os.FileMode) error + Stat(path string) (os.FileInfo, error) + MkdirAll(path string, perm os.FileMode) error + Remove(path string) error +} + +// Environment defines access to environment variables. +type Environment interface { + Get(key string) string + Set(key string, value string) error + Lookup(key string) (string, bool) +} + +// GitClient defines git operations needed by the CLI. +type GitClient interface { + GetHead(dir string) (string, string, error) + ListRemotes(dir string) (map[string]RemoteInfo, error) +} diff --git a/utils/fetch_oidc_token.go b/internal/oidc/fetch_oidc_token.go similarity index 86% rename from utils/fetch_oidc_token.go rename to internal/oidc/fetch_oidc_token.go index b448cab6..e53f1c0a 100644 --- a/utils/fetch_oidc_token.go +++ b/internal/oidc/fetch_oidc_token.go @@ -1,4 +1,4 @@ -package utils +package oidc import ( "encoding/json" @@ -38,7 +38,7 @@ func FetchOIDCTokenFromProvider(requestId, requestUrl string) (string, error) { // check if the response is 200 if resp.StatusCode != http.StatusOK { - return "", fmt.Errorf("failed to fetch OIDC token: %s", resp.Status) + return "", fmt.Errorf("Failed to fetch OIDC token: %s", resp.Status) } // extract the token from the json response. The token is sent under the key `value` @@ -51,7 +51,7 @@ func FetchOIDCTokenFromProvider(requestId, requestUrl string) (string, error) { } // check if the token is empty if tokenResponse.Value == "" { - return "", fmt.Errorf("failed to fetch OIDC token: empty token") + return "", fmt.Errorf("Failed to fetch OIDC token: empty token") } // return the token return tokenResponse.Value, nil @@ -76,7 +76,7 @@ func ExchangeOIDCTokenForTempDSN(oidcToken, dsEndpoint, provider string) (string } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - return "", fmt.Errorf("failed to exchange OIDC token for DSN: %s", resp.Status) + return "", fmt.Errorf("Failed to exchange OIDC token for DSN: %s", resp.Status) } var exchangeResponse ExchangeResponse if err := json.NewDecoder(resp.Body).Decode(&exchangeResponse); err != nil { @@ -84,7 +84,7 @@ func ExchangeOIDCTokenForTempDSN(oidcToken, dsEndpoint, provider string) (string } // check if the token is empty if exchangeResponse.DSN == "" { - return "", fmt.Errorf("failed to exchange OIDC token for DSN: empty token") + return "", fmt.Errorf("Failed to exchange OIDC token for DSN: empty token") } // return the token return exchangeResponse.DSN, nil @@ -98,16 +98,16 @@ func GetDSNFromOIDC(requestId, requestUrl, dsEndpoint, provider string) (string, } if dsEndpoint == "" { - return "", fmt.Errorf("--deepsource-host-endpoint can not be empty") + return "", fmt.Errorf("--deepsource-host-endpoint cannot be empty") } if provider == "" { - return "", fmt.Errorf("--oidc-provider can not be empty") + return "", fmt.Errorf("--oidc-provider cannot be empty") } isSupported := ALLOWED_PROVIDERS[provider] if !isSupported { - return "", fmt.Errorf("provider %s is not supported for OIDC Token exchange (Supported Providers: %v)", provider, ALLOWED_PROVIDERS) + return "", fmt.Errorf("Provider %s is not supported for OIDC token exchange (supported providers: %v)", provider, ALLOWED_PROVIDERS) } if requestId == "" || requestUrl == "" { var foundIDToken, foundRequestURL bool @@ -118,7 +118,7 @@ func GetDSNFromOIDC(requestId, requestUrl, dsEndpoint, provider string) (string, requestId, foundIDToken = os.LookupEnv("ACTIONS_ID_TOKEN_REQUEST_TOKEN") 
requestUrl, foundRequestURL = os.LookupEnv("ACTIONS_ID_TOKEN_REQUEST_URL") if !(foundIDToken && foundRequestURL) { - errMsg := `failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN" and "ACTIONS_ID_TOKEN_REQUEST_URL" from environment variables. Please make sure you are running this in a GitHub Actions environment with the required permissions. Or, use '--oidc-request-token' and '--oidc-request-url' flags to pass the token and request URL` + errMsg := `Failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN" and "ACTIONS_ID_TOKEN_REQUEST_URL" from environment variables. Please make sure you are running this in a GitHub Actions environment with the required permissions. Or, use '--oidc-request-token' and '--oidc-request-url' flags to pass the token and request URL` return "", fmt.Errorf("%s", errMsg) } } diff --git a/utils/fetch_oidc_token_test.go b/internal/oidc/fetch_oidc_token_test.go similarity index 94% rename from utils/fetch_oidc_token_test.go rename to internal/oidc/fetch_oidc_token_test.go index b081d881..7f5e8d4b 100644 --- a/utils/fetch_oidc_token_test.go +++ b/internal/oidc/fetch_oidc_token_test.go @@ -1,4 +1,4 @@ -package utils +package oidc import ( "encoding/json" @@ -63,7 +63,7 @@ func TestFetchOIDCTokenFromProvider(t *testing.T) { if err == nil { t.Fatal("Expected error for non-200 status, got nil") } - expectedErrorMsg := "failed to fetch OIDC token: 404 Not Found" + expectedErrorMsg := "Failed to fetch OIDC token: 404 Not Found" if !strings.Contains(err.Error(), expectedErrorMsg) { t.Errorf("Expected error message to contain '%s', got '%s'", expectedErrorMsg, err.Error()) } @@ -145,7 +145,7 @@ func TestExchangeOIDCTokenForTempDSN(t *testing.T) { if err == nil { t.Fatal("Expected error for non-200 status, got nil") } - expectedErrorMsg := "failed to exchange OIDC token for DSN: 403 Forbidden" + expectedErrorMsg := "Failed to exchange OIDC token for DSN: 403 Forbidden" if !strings.Contains(err.Error(), expectedErrorMsg) { t.Errorf("Expected error message to contain '%s', got '%s'", expectedErrorMsg, err.Error()) } @@ -252,7 +252,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error for empty dsEndpoint, got nil") } - if !strings.Contains(err.Error(), "--deepsource-host-endpoint can not be empty") { + if !strings.Contains(err.Error(), "--deepsource-host-endpoint cannot be empty") { t.Errorf("Unexpected error message: %s", err.Error()) } }) @@ -264,7 +264,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error for empty provider, got nil") } - if !strings.Contains(err.Error(), "--oidc-provider can not be empty") { + if !strings.Contains(err.Error(), "--oidc-provider cannot be empty") { t.Errorf("Unexpected error message: %s", err.Error()) } }) @@ -275,8 +275,8 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error for unsupported provider, got nil") } - if !strings.Contains(err.Error(), "provider unsupported is not supported") { - t.Errorf("Expected error message to contain 'provider unsupported is not supported', got '%s'", err.Error()) + if !strings.Contains(err.Error(), "Provider unsupported is not supported") { + t.Errorf("Expected error message to contain 'Provider unsupported is not supported', got '%s'", err.Error()) } }) @@ -294,7 +294,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error for missing ACTIONS_ID_TOKEN_REQUEST_TOKEN, got nil") } - if !strings.Contains(err.Error(), `failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN"`) { + if 
!strings.Contains(err.Error(), `Failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN"`) { t.Errorf("Unexpected error message: %s", err.Error()) } }) @@ -312,7 +312,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error for missing ACTIONS_ID_TOKEN_REQUEST_URL, got nil") } - if !strings.Contains(err.Error(), `failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN"`) { // Error message covers both + if !strings.Contains(err.Error(), `Failed to fetch "ACTIONS_ID_TOKEN_REQUEST_TOKEN"`) { // Error message covers both t.Errorf("Unexpected error message: %s", err.Error()) } }) @@ -327,7 +327,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error when FetchOIDCTokenFromProvider fails, got nil") } - if !strings.Contains(err.Error(), "failed to fetch OIDC token") { + if !strings.Contains(err.Error(), "Failed to fetch OIDC token") { t.Errorf("Unexpected error message: %s", err.Error()) } }) @@ -342,7 +342,7 @@ func TestGetDSNFromOIDC(t *testing.T) { if err == nil { t.Fatal("Expected error when ExchangeOIDCTokenForTempDSN fails, got nil") } - if !strings.Contains(err.Error(), "failed to exchange OIDC token for DSN") { + if !strings.Contains(err.Error(), "Failed to exchange OIDC token for DSN") { t.Errorf("Unexpected error message: %s", err.Error()) } }) diff --git a/internal/secrets/keychain_darwin.go b/internal/secrets/keychain_darwin.go new file mode 100644 index 00000000..07a41860 --- /dev/null +++ b/internal/secrets/keychain_darwin.go @@ -0,0 +1,56 @@ +//go:build darwin + +package secrets + +import ( + "bytes" + "os/exec" + "strings" + + "github.com/deepsourcelabs/cli/buildinfo" +) + +type keychainStore struct { + service string +} + +// NewKeychainStore returns a macOS keychain-backed store. +func NewKeychainStore() Store { + return &keychainStore{service: buildinfo.KeychainSvc} +} + +func (k *keychainStore) Get(key string) (string, error) { + cmd := exec.Command("security", "find-generic-password", "-s", k.service, "-a", key, "-w") + var stderr bytes.Buffer + cmd.Stderr = &stderr + output, err := cmd.Output() + if err != nil { + if strings.Contains(stderr.String(), "could not be found") { + return "", ErrNotFound + } + return "", err + } + return strings.TrimSpace(string(output)), nil +} + +func (k *keychainStore) Set(key string, value string) error { + cmd := exec.Command("security", "add-generic-password", "-s", k.service, "-a", key, "-w", value, "-U") + output, err := cmd.CombinedOutput() + if err != nil { + return err + } + _ = output + return nil +} + +func (k *keychainStore) Delete(key string) error { + cmd := exec.Command("security", "delete-generic-password", "-s", k.service, "-a", key) + output, err := cmd.CombinedOutput() + if err != nil { + if strings.Contains(string(output), "could not be found") { + return ErrNotFound + } + return err + } + return nil +} diff --git a/internal/secrets/keychain_stub.go b/internal/secrets/keychain_stub.go new file mode 100644 index 00000000..29e90a15 --- /dev/null +++ b/internal/secrets/keychain_stub.go @@ -0,0 +1,8 @@ +//go:build !darwin + +package secrets + +// NewKeychainStore returns a no-op store on unsupported platforms. 
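To show how calling code might sit on top of these platform stores (the macOS keychain implementation above and the no-op fallback declared next), here is a hedged usage sketch; the key name and value are invented for the example, and DefaultStore, ErrNotFound, and ErrUnavailable come from store.go further below:

package main

import (
	"errors"
	"fmt"

	"github.com/deepsourcelabs/cli/internal/secrets"
)

func main() {
	// DefaultStore resolves to the keychain-backed store on darwin and to
	// NoopStore elsewhere, so callers must tolerate ErrUnavailable.
	store := secrets.DefaultStore()

	if err := store.Set("api-token", "s3cr3t"); err != nil {
		fmt.Println("could not persist secret:", err)
		return
	}

	value, err := store.Get("api-token")
	switch {
	case errors.Is(err, secrets.ErrNotFound):
		fmt.Println("no secret stored under this key")
	case err != nil:
		fmt.Println("lookup failed:", err)
	default:
		fmt.Println("retrieved secret of length", len(value))
	}

	_ = store.Delete("api-token")
}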
+func NewKeychainStore() Store { + return NoopStore{} +} diff --git a/internal/secrets/store.go b/internal/secrets/store.go new file mode 100644 index 00000000..90919230 --- /dev/null +++ b/internal/secrets/store.go @@ -0,0 +1,35 @@ +package secrets + +import "errors" + +var ( + ErrNotFound = errors.New("Secret not found") + ErrUnavailable = errors.New("Secrets store unavailable") +) + +// Store provides secret storage primitives. +type Store interface { + Get(key string) (string, error) + Set(key string, value string) error + Delete(key string) error +} + +// NoopStore implements Store with no backing storage. +type NoopStore struct{} + +func (NoopStore) Get(key string) (string, error) { + return "", ErrUnavailable +} + +func (NoopStore) Set(key string, value string) error { + return ErrUnavailable +} + +func (NoopStore) Delete(key string) error { + return ErrUnavailable +} + +// DefaultStore returns the best available store for the platform. +func DefaultStore() Store { + return NewKeychainStore() +} diff --git a/internal/services/auth/service.go b/internal/services/auth/service.go new file mode 100644 index 00000000..9af4f508 --- /dev/null +++ b/internal/services/auth/service.go @@ -0,0 +1,63 @@ +package auth + +import ( + "context" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource" + dsauth "github.com/deepsourcelabs/cli/deepsource/auth" + dsuser "github.com/deepsourcelabs/cli/deepsource/user" +) + +// ClientFactory constructs a DeepSource API client. +type ClientFactory func(opts deepsource.ClientOpts) (*deepsource.Client, error) + +// Service handles auth-related operations. +type Service struct { + config *config.Manager + newClient ClientFactory +} + +// NewService creates a new auth service. +func NewService(configMgr *config.Manager) *Service { + return &Service{ + config: configMgr, + newClient: deepsource.New, + } +} + +func (s *Service) LoadConfig() (*config.CLIConfig, error) { + return s.config.Load() +} + +func (s *Service) SaveConfig(cfg *config.CLIConfig) error { + return s.config.Write(cfg) +} + +func (s *Service) DeleteConfig() error { + return s.config.Delete() +} + +func (s *Service) RegisterDevice(ctx context.Context, cfg *config.CLIConfig) (*dsauth.Device, error) { + client, err := s.newClient(deepsource.ClientOpts{Token: cfg.Token, HostName: cfg.Host}) + if err != nil { + return nil, err + } + return client.RegisterDevice(ctx) +} + +func (s *Service) RequestPAT(ctx context.Context, cfg *config.CLIConfig, deviceCode, description string) (*dsauth.PAT, error) { + client, err := s.newClient(deepsource.ClientOpts{Token: cfg.Token, HostName: cfg.Host}) + if err != nil { + return nil, err + } + return client.Login(ctx, deviceCode, description) +} + +func (s *Service) GetViewer(ctx context.Context, cfg *config.CLIConfig) (*dsuser.User, error) { + client, err := s.newClient(deepsource.ClientOpts{Token: cfg.Token, HostName: cfg.Host}) + if err != nil { + return nil, err + } + return client.GetViewer(ctx) +} diff --git a/internal/services/auth/service_test.go b/internal/services/auth/service_test.go new file mode 100644 index 00000000..20b960ac --- /dev/null +++ b/internal/services/auth/service_test.go @@ -0,0 +1,31 @@ +package auth + +import ( + "testing" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/secrets" + "github.com/stretchr/testify/assert" +) + +func TestServiceSaveLoadDeleteConfig(t *testing.T) { + tempDir := t.TempDir() + homeDir := func() 
(string, error) { return tempDir, nil }
+	mgr := config.NewManagerWithSecrets(adapters.NewOSFileSystem(), homeDir, secrets.NoopStore{}, "test-key")
+	svc := NewService(mgr)
+
+	cfg := &config.CLIConfig{Host: "deepsource.com", User: "demo", Token: "demo-token"}
+	assert.NoError(t, svc.SaveConfig(cfg))
+
+	loaded, err := svc.LoadConfig()
+	assert.NoError(t, err)
+	assert.Equal(t, cfg.Host, loaded.Host)
+	assert.Equal(t, cfg.User, loaded.User)
+	assert.Equal(t, cfg.Token, loaded.Token)
+
+	assert.NoError(t, svc.DeleteConfig())
+	loaded, err = svc.LoadConfig()
+	assert.NoError(t, err)
+	assert.Empty(t, loaded.Token)
+}
diff --git a/internal/services/repo/service.go b/internal/services/repo/service.go
new file mode 100644
index 00000000..be86e155
--- /dev/null
+++ b/internal/services/repo/service.go
@@ -0,0 +1,173 @@
+package repo
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"strings"
+
+	"github.com/deepsourcelabs/cli/config"
+	"github.com/deepsourcelabs/cli/deepsource"
+	"github.com/deepsourcelabs/cli/deepsource/analyzers"
+	"github.com/deepsourcelabs/cli/deepsource/repository"
+	"github.com/deepsourcelabs/cli/internal/vcs"
+)
+
+// Client defines the repo API used by the service.
+type Client interface {
+	GetRepoStatus(ctx context.Context, owner, repoName, provider string) (*repository.Meta, error)
+	GetEnabledAnalyzers(ctx context.Context, owner, repoName, provider string) ([]analyzers.Analyzer, error)
+}
+
+// ClientFactory constructs a repo client.
+type ClientFactory func(opts deepsource.ClientOpts) (Client, error)
+
+// Service provides repository operations.
+type Service struct {
+	config        *config.Manager
+	newClient     ClientFactory
+	resolveRemote func(repoArg string) (*vcs.RemoteData, error)
+}
+
+// NewService creates a repo service.
+func NewService(configMgr *config.Manager) *Service {
+	return &Service{
+		config:        configMgr,
+		newClient:     func(opts deepsource.ClientOpts) (Client, error) { return deepsource.New(opts) },
+		resolveRemote: vcs.ResolveRemote,
+	}
+}
+
+// NewTestService creates a repo service with injectable client factory for testing.
+func NewTestService(configMgr *config.Manager, clientFactory ClientFactory) *Service {
+	return &Service{
+		config:        configMgr,
+		newClient:     clientFactory,
+		resolveRemote: vcs.ResolveRemote,
+	}
+}
+
+// StatusResult holds repository status info.
+type StatusResult struct {
+	Remote    *vcs.RemoteData
+	Activated bool
+	Host      string
+}
+
+// Status checks repository activation status.
+func (s *Service) Status(ctx context.Context, repoArg string) (*StatusResult, error) {
+	cfg, err := s.config.Load()
+	if err != nil {
+		return nil, fmt.Errorf("Error while reading DeepSource CLI config: %v", err)
+	}
+	if err := cfg.VerifyAuthentication(); err != nil {
+		return nil, err
+	}
+
+	remote, err := s.resolveRemote(repoArg)
+	if err != nil {
+		return nil, err
+	}
+
+	client, err := s.newClient(deepsource.ClientOpts{
+		Token:            cfg.Token,
+		HostName:         cfg.Host,
+		OnTokenRefreshed: s.config.TokenRefreshCallback(),
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	statusResponse, err := client.GetRepoStatus(ctx, remote.Owner, remote.RepoName, remote.VCSProvider)
+	if err != nil {
+		return nil, err
+	}
+
+	return &StatusResult{Remote: remote, Activated: statusResponse.Activated, Host: cfg.Host}, nil
+}
+
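For orientation, a command layer could consume Status roughly as follows. The config.Manager wiring shown here mirrors the test setup and is an assumption, not part of this patch; the secrets key name is invented:

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/deepsourcelabs/cli/config"
	"github.com/deepsourcelabs/cli/internal/adapters"
	"github.com/deepsourcelabs/cli/internal/secrets"
	"github.com/deepsourcelabs/cli/internal/services/repo"
)

func main() {
	// Assumed wiring: the real CLI presumably constructs its config.Manager elsewhere.
	mgr := config.NewManagerWithSecrets(adapters.NewOSFileSystem(), os.UserHomeDir, secrets.DefaultStore(), "cli-config")
	svc := repo.NewService(mgr)

	result, err := svc.Status(context.Background(), "")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	if result.Activated {
		fmt.Printf("%s/%s is activated on %s\n", result.Remote.Owner, result.Remote.RepoName, result.Host)
	} else {
		fmt.Printf("%s/%s is not activated yet\n", result.Remote.Owner, result.Remote.RepoName)
	}
}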
+// ViewURL validates access and returns the dashboard URL.
+func (s *Service) ViewURL(ctx context.Context, repoArg string) (string, error) {
+	cfg, err := s.config.Load()
+	if err != nil {
+		return "", fmt.Errorf("Error while reading DeepSource CLI config: %v", err)
+	}
+	if err := cfg.VerifyAuthentication(); err != nil {
+		return "", err
+	}
+
+	remote, err := s.resolveRemote(repoArg)
+	if err != nil {
+		return "", err
+	}
+
+	client, err := s.newClient(deepsource.ClientOpts{
+		Token:            cfg.Token,
+		HostName:         cfg.Host,
+		OnTokenRefreshed: s.config.TokenRefreshCallback(),
+	})
+	if err != nil {
+		return "", err
+	}
+
+	_, err = client.GetRepoStatus(ctx, remote.Owner, remote.RepoName, remote.VCSProvider)
+	if err != nil {
+		if strings.Contains(err.Error(), "Repository matching query does not exist") {
+			return "", errors.New("Unauthorized access. Please log in using the command `deepsource auth login` if you haven't already")
+		}
+		return "", err
+	}
+
+	shortcode := vcsShortcode(remote.VCSProvider)
+	if shortcode == "" {
+		return "", fmt.Errorf("Unknown VCS provider: %s", remote.VCSProvider)
+	}
+
+	return fmt.Sprintf("https://%s/%s/%s/%s/", cfg.Host, shortcode, remote.Owner, remote.RepoName), nil
+}
+
+// EnabledAnalyzers returns the analyzers enabled on a repository.
+func (s *Service) EnabledAnalyzers(ctx context.Context, repoArg string) ([]analyzers.Analyzer, error) {
+	cfg, err := s.config.Load()
+	if err != nil {
+		return nil, fmt.Errorf("Error while reading DeepSource CLI config: %v", err)
+	}
+	if err := cfg.VerifyAuthentication(); err != nil {
+		return nil, err
+	}
+
+	remote, err := s.resolveRemote(repoArg)
+	if err != nil {
+		return nil, err
+	}
+
+	client, err := s.newClient(deepsource.ClientOpts{
+		Token:            cfg.Token,
+		HostName:         cfg.Host,
+		OnTokenRefreshed: s.config.TokenRefreshCallback(),
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	return client.GetEnabledAnalyzers(ctx, remote.Owner, remote.RepoName, remote.VCSProvider)
+}
+
+func vcsShortcode(provider string) string {
+	switch provider {
+	case "GITHUB":
+		return "gh"
+	case "GITHUB_ENTERPRISE":
+		return "ghe"
+	case "GITLAB":
+		return "gl"
+	case "BITBUCKET":
+		return "bb"
+	case "BITBUCKET_DATACENTER":
+		return "bbdc"
+	case "ADS":
+		return "ads"
+	default:
+		return ""
+	}
+}
diff --git a/internal/services/repo/service_test.go b/internal/services/repo/service_test.go
new file mode 100644
index 00000000..669145ca
--- /dev/null
+++ b/internal/services/repo/service_test.go
@@ -0,0 +1,72 @@
+package repo
+
+import (
+	"context"
+	"errors"
+	"testing"
+
+	"github.com/deepsourcelabs/cli/config"
+	"github.com/deepsourcelabs/cli/deepsource"
+	"github.com/deepsourcelabs/cli/deepsource/analyzers"
+	"github.com/deepsourcelabs/cli/deepsource/repository"
+	"github.com/deepsourcelabs/cli/internal/adapters"
+	"github.com/deepsourcelabs/cli/internal/secrets"
+	"github.com/deepsourcelabs/cli/internal/vcs"
+	"github.com/stretchr/testify/assert"
+)
+
+type fakeRepoClient struct {
+	status *repository.Meta
+	err    error
+}
+
+func (f *fakeRepoClient) GetRepoStatus(ctx context.Context, owner, repoName, provider string) (*repository.Meta, error) {
+	return f.status, f.err
+}
+
+func (f *fakeRepoClient) GetEnabledAnalyzers(ctx context.Context, owner, repoName, provider string) ([]analyzers.Analyzer, error) {
+	return nil, f.err
+}
+
+func TestServiceStatus(t *testing.T) {
+	tempDir := t.TempDir()
+	homeDir := func() (string, error) { return tempDir, nil }
+	mgr := config.NewManagerWithSecrets(adapters.NewOSFileSystem(), homeDir, secrets.NoopStore{}, "test-key")
+	cfg := &config.CLIConfig{Host: 
"deepsource.com", User: "demo", Token: "token"} + assert.NoError(t, mgr.Write(cfg)) + + svc := NewService(mgr) + svc.resolveRemote = func(repoArg string) (*vcs.RemoteData, error) { + return &vcs.RemoteData{Owner: "o", RepoName: "r", VCSProvider: "GITHUB"}, nil + } + svc.newClient = func(opts deepsource.ClientOpts) (Client, error) { + return &fakeRepoClient{status: &repository.Meta{Activated: true}}, nil + } + + result, err := svc.Status(context.Background(), "") + assert.NoError(t, err) + if assert.NotNil(t, result) { + assert.True(t, result.Activated) + assert.Equal(t, "deepsource.com", result.Host) + } +} + +func TestServiceViewURLUnauthorized(t *testing.T) { + tempDir := t.TempDir() + homeDir := func() (string, error) { return tempDir, nil } + mgr := config.NewManagerWithSecrets(adapters.NewOSFileSystem(), homeDir, secrets.NoopStore{}, "test-key") + cfg := &config.CLIConfig{Host: "deepsource.com", User: "demo", Token: "token"} + assert.NoError(t, mgr.Write(cfg)) + + svc := NewService(mgr) + svc.resolveRemote = func(repoArg string) (*vcs.RemoteData, error) { + return &vcs.RemoteData{Owner: "o", RepoName: "r", VCSProvider: "GITHUB"}, nil + } + svc.newClient = func(opts deepsource.ClientOpts) (Client, error) { + return &fakeRepoClient{err: errors.New("Repository matching query does not exist")}, nil + } + + _, err := svc.ViewURL(context.Background(), "") + assert.Error(t, err) + assert.Contains(t, err.Error(), "Unauthorized") +} diff --git a/internal/services/report/constants.go b/internal/services/report/constants.go new file mode 100644 index 00000000..b279da2c --- /dev/null +++ b/internal/services/report/constants.go @@ -0,0 +1,9 @@ +package report + +const ( + CliVersion = "v0.8.0" + + reportGraphqlQuery = "mutation($input: CreateArtifactInput!) {\r\n createArtifact(input: $input) {\r\n ok\r\n message\r\n error\r\n }\r\n}" + reportGraphqlQueryOld = "mutation($input: CreateArtifactInput!) {\r\n createArtifact(input: $input) {\r\n ok\r\n error\r\n }\r\n}" + graphqlCheckCompressed = "query {\r\n __type(name: \"ArtifactMetadataInput\") {\r\n inputFields {\r\n name\r\n }\r\n }\r\n}" +) diff --git a/internal/services/report/dsn.go b/internal/services/report/dsn.go new file mode 100644 index 00000000..c716d02c --- /dev/null +++ b/internal/services/report/dsn.go @@ -0,0 +1,27 @@ +package report + +import ( + "errors" + "regexp" +) + +var ErrInvalidDSN = errors.New("DeepSource | Error | Invalid DSN. Expected format: https://token@host. 
Cross verify DEEPSOURCE_DSN against the repository settings page") + +type DSN struct { + Protocol string + Host string + Token string +} + +func NewDSN(raw string) (*DSN, error) { + dsnPattern := regexp.MustCompile(`^(https?)://([^:@]+)@([^:/]+(?:\:\d+)?)`) + matches := dsnPattern.FindStringSubmatch(raw) + if len(matches) != 4 { + return nil, ErrInvalidDSN + } + return &DSN{ + Protocol: matches[1], + Token: matches[2], + Host: matches[3], + }, nil +} diff --git a/internal/services/report/service.go b/internal/services/report/service.go new file mode 100644 index 00000000..e71356d3 --- /dev/null +++ b/internal/services/report/service.go @@ -0,0 +1,334 @@ +package report + +import ( + "bytes" + "context" + "crypto/tls" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "os" + "strings" + "time" + + "github.com/klauspost/compress/zstd" + clierrors "github.com/deepsourcelabs/cli/internal/errors" + "github.com/deepsourcelabs/cli/internal/interfaces" + "github.com/deepsourcelabs/cli/internal/oidc" +) + +type ServiceDeps struct { + GitClient interfaces.GitClient + HTTPClient interfaces.HTTPClient + FileSystem interfaces.FileSystem + Environment interfaces.Environment + Sentry interfaces.SentryClient + Output interfaces.OutputWriter + Workdir func() (string, error) +} + +// Service handles artifact reporting. +type Service struct { + git interfaces.GitClient + http interfaces.HTTPClient + fs interfaces.FileSystem + env interfaces.Environment + sentry interfaces.SentryClient + output interfaces.OutputWriter + workdir func() (string, error) +} + +// NewService creates a report service. +func NewService(deps ServiceDeps) *Service { + workdir := deps.Workdir + if workdir == nil { + workdir = os.Getwd + } + return &Service{ + git: deps.GitClient, + http: deps.HTTPClient, + fs: deps.FileSystem, + env: deps.Environment, + sentry: deps.Sentry, + output: deps.Output, + workdir: workdir, + } +} + +// Report publishes an artifact and returns report metadata. +func (s *Service) Report(ctx context.Context, opts Options) (*Result, error) { + s.sanitize(&opts) + + if opts.UseOIDC { + dsn, err := oidc.GetDSNFromOIDC(opts.OIDCRequestToken, opts.OIDCRequestUrl, opts.DeepSourceHostEndpoint, opts.OIDCProvider) + if err != nil { + s.capture(err) + return nil, fmt.Errorf("DeepSource | Error | Failed to get DSN using OIDC: %w", err) + } + opts.DSN = dsn + } + + if opts.DSN == "" { + return nil, clierrors.NewUserError(errors.New("DeepSource | Error | Environment variable DEEPSOURCE_DSN not set (or) is empty. Set DEEPSOURCE_DSN from the repository settings page or use --use-oidc.")) + } + + s.infof("DeepSource | Info | Preparing artifact...\n") + currentDir, err := s.workdir() + if err != nil { + s.capture(err) + return nil, errors.New("DeepSource | Error | Unable to identify current directory") + } + + if err := s.validateKey(opts); err != nil { + uerr := clierrors.NewUserError(err) + s.capture(uerr) + return nil, uerr + } + + dsn, err := NewDSN(opts.DSN) + if err != nil { + uerr := clierrors.NewUserError(err) + s.capture(uerr) + return nil, uerr + } + + headCommitOID, warning, err := s.git.GetHead(currentDir) + if err != nil { + s.capture(err) + return nil, errors.New("DeepSource | Error | Unable to get commit OID HEAD. 
Make sure you are running the CLI from a git repository") + } + + if opts.Value == "" && opts.ValueFile == "" { + return nil, clierrors.NewUserError(errors.New("DeepSource | Error | '--value' (or) '--value-file' not passed")) + } + + artifactValue := opts.Value + if opts.ValueFile != "" { + if _, err := s.fs.Stat(opts.ValueFile); err != nil { + uerr := clierrors.NewUserErrorf("DeepSource | Error | Unable to read specified value file: %s", opts.ValueFile) + s.capture(uerr) + return nil, uerr + } + + valueBytes, err := s.fs.ReadFile(opts.ValueFile) + if err != nil { + uerr := clierrors.NewUserErrorf("DeepSource | Error | Unable to read specified value file: %s", opts.ValueFile) + s.capture(uerr) + return nil, uerr + } + + artifactValue = string(valueBytes) + } + + s.infof("DeepSource | Info | Checking compression support...\n") + meta := map[string]interface{}{"workDir": currentDir} + compressed, err := s.compressIfSupported(ctx, dsn, artifactValue, opts.SkipCertificateVerification, meta) + if err != nil { + s.capture(err) + return nil, err + } + artifactValue = compressed + + queryInput := ReportQueryInput{ + AccessToken: dsn.Token, + CommitOID: headCommitOID, + ReporterName: "cli", + ReporterVersion: CliVersion, + Key: opts.Key, + Data: artifactValue, + AnalyzerShortcode: opts.Analyzer, + Metadata: meta, + } + if opts.AnalyzerType != "" { + queryInput.AnalyzerType = opts.AnalyzerType + } + + query := ReportQuery{Query: reportGraphqlQuery} + query.Variables.Input = queryInput + + queryBodyBytes, err := json.Marshal(query) + if err != nil { + s.capture(err) + return nil, errors.New("DeepSource | Error | Unable to marshal query body") + } + + s.infof("DeepSource | Info | Uploading artifact...\n") + responseBody, err := s.makeQuery(ctx, dsn, queryBodyBytes, opts.SkipCertificateVerification) + if err != nil { + queryFallback := ReportQuery{Query: reportGraphqlQueryOld} + queryFallback.Variables.Input = queryInput + queryBodyBytes, err = json.Marshal(queryFallback) + if err != nil { + s.capture(err) + return nil, errors.New("DeepSource | Error | Unable to marshal query body") + } + + responseBody, err = s.makeQuery(ctx, dsn, queryBodyBytes, opts.SkipCertificateVerification) + if err != nil { + s.capture(err) + return nil, fmt.Errorf("DeepSource | Error | Reporting failed | %w", err) + } + } + + queryResponse := QueryResponse{} + if err := json.Unmarshal(responseBody, &queryResponse); err != nil { + s.capture(err) + return nil, errors.New("DeepSource | Error | Unable to parse response body") + } + + if !queryResponse.Data.CreateArtifact.Ok { + err := errors.New(queryResponse.Data.CreateArtifact.Error) + s.capture(err) + return nil, fmt.Errorf("DeepSource | Error | Reporting failed | %s", queryResponse.Data.CreateArtifact.Error) + } + + return &Result{ + Analyzer: opts.Analyzer, + Key: opts.Key, + Message: queryResponse.Data.CreateArtifact.Message, + Warning: warning, + }, nil +} + +func (s *Service) sanitize(opts *Options) { + opts.Analyzer = strings.TrimSpace(opts.Analyzer) + opts.AnalyzerType = strings.TrimSpace(opts.AnalyzerType) + opts.Key = strings.TrimSpace(opts.Key) + opts.Value = strings.TrimSpace(opts.Value) + opts.ValueFile = strings.TrimSpace(opts.ValueFile) + if opts.DSN == "" { + opts.DSN = strings.TrimSpace(s.env.Get("DEEPSOURCE_DSN")) + } + opts.OIDCRequestToken = strings.TrimSpace(opts.OIDCRequestToken) + opts.OIDCRequestUrl = strings.TrimSpace(opts.OIDCRequestUrl) + opts.DeepSourceHostEndpoint = strings.TrimSpace(opts.DeepSourceHostEndpoint) +} + +func (s *Service) 
validateKey(opts Options) error { + supportedKeys := map[string]bool{ + "python": true, + "go": true, + "javascript": true, + "ruby": true, + "java": true, + "scala": true, + "php": true, + "csharp": true, + "cxx": true, + "rust": true, + "swift": true, + "kotlin": true, + } + + if opts.Analyzer == "test-coverage" && !supportedKeys[opts.Key] { + return fmt.Errorf("DeepSource | Error | Invalid Key: %s (Supported Keys: %v)", opts.Key, supportedKeys) + } + + return nil +} + +func (s *Service) compressIfSupported(ctx context.Context, dsn *DSN, artifactValue string, skipVerify bool, meta map[string]interface{}) (string, error) { + q := ReportQuery{Query: graphqlCheckCompressed} + qBytes, err := json.Marshal(q) + if err != nil { + return "", fmt.Errorf("DeepSource | Error | Failed to marshal query: %w", err) + } + + response, err := s.makeQuery(ctx, dsn, qBytes, skipVerify) + if err != nil { + return "", fmt.Errorf("DeepSource | Error | Failed to make query: %w", err) + } + + var res struct { + Data struct { + Type struct { + InputFields []struct { + Name string `json:"name"` + } `json:"inputFields"` + } `json:"__type"` + } `json:"data"` + } + + if err := json.Unmarshal(response, &res); err != nil { + return "", fmt.Errorf("DeepSource | Error | Failed to unmarshal response: %w", err) + } + + for _, inputField := range res.Data.Type.InputFields { + if inputField.Name != "compressed" { + continue + } + + encoder, err := zstd.NewWriter(nil, zstd.WithEncoderLevel(zstd.SpeedBestCompression)) + if err != nil { + return "", fmt.Errorf("DeepSource | Error | Failed to create zstd encoder: %w", err) + } + compressedBytes := encoder.EncodeAll([]byte(artifactValue), nil) + + meta["compressed"] = "True" + return base64.StdEncoding.EncodeToString(compressedBytes), nil + } + + return artifactValue, nil +} + +func (s *Service) makeQuery(ctx context.Context, dsn *DSN, body []byte, skipVerify bool) ([]byte, error) { + url := dsn.Protocol + "://" + dsn.Host + "/graphql/cli/" + + req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + var resBody []byte + + client := s.http + if skipVerify { + client = &http.Client{ + Timeout: time.Second * 60, + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + }, + } + } + + res, err := client.Do(req) + if err != nil { + return resBody, err + } + defer res.Body.Close() + + resBody, err = io.ReadAll(res.Body) + if err != nil { + return resBody, err + } + + if res.StatusCode >= http.StatusInternalServerError || res.StatusCode != 200 { + if resBody != nil { + return resBody, fmt.Errorf("Server responded with %d: %s", res.StatusCode, string(resBody)) + } + return resBody, fmt.Errorf("Server responded with %d", res.StatusCode) + } + + return resBody, nil +} + +func (s *Service) capture(err error) { + if s.sentry == nil { + return + } + if clierrors.IsUserError(err) { + return + } + s.sentry.CaptureException(err) +} + +func (s *Service) infof(format string, args ...interface{}) { + if s.output == nil { + return + } + s.output.Printf(format, args...) 
+} diff --git a/internal/services/report/service_test.go b/internal/services/report/service_test.go new file mode 100644 index 00000000..2a1deba9 --- /dev/null +++ b/internal/services/report/service_test.go @@ -0,0 +1,112 @@ +package report + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/stretchr/testify/assert" +) + +type mockHTTPClient struct { + DoFunc func(req *http.Request) (*http.Response, error) +} + +func (m *mockHTTPClient) Do(req *http.Request) (*http.Response, error) { + return m.DoFunc(req) +} + +func TestReportSuccess(t *testing.T) { + tempDir := t.TempDir() + artifactPath := filepath.Join(tempDir, "coverage.xml") + assert.NoError(t, os.WriteFile(artifactPath, []byte(""), 0o644)) + + httpClient := &mockHTTPClient{DoFunc: func(req *http.Request) (*http.Response, error) { + body, _ := io.ReadAll(req.Body) + _ = req.Body.Close() + if bytes.Contains(body, []byte("ArtifactMetadataInput")) { + payload := `{"data":{"__type":{"inputFields":[{"name":"compressed"}]}}}` + return httpResponse(200, payload), nil + } + if bytes.Contains(body, []byte("createArtifact")) { + payload := `{"data":{"createArtifact":{"ok":true,"message":"ok","error":""}}}` + return httpResponse(200, payload), nil + } + return httpResponse(400, `{"error":"unexpected"}`), nil + }} + + git := adapters.NewMockGitClient() + git.SetHead("abc123", "") + env := adapters.NewMockEnvironment() + env.Set("DEEPSOURCE_DSN", "https://token@localhost:8080") + + svc := NewService(ServiceDeps{ + GitClient: git, + HTTPClient: httpClient, + FileSystem: adapters.NewOSFileSystem(), + Environment: env, + Sentry: adapters.NewNoOpSentry(), + Output: adapters.NewBufferOutput(), + Workdir: func() (string, error) { + return tempDir, nil + }, + }) + + result, err := svc.Report(context.Background(), Options{ + Analyzer: "test-coverage", + Key: "python", + ValueFile: artifactPath, + }) + + assert.NoError(t, err) + if assert.NotNil(t, result) { + assert.Equal(t, "test-coverage", result.Analyzer) + assert.Equal(t, "python", result.Key) + assert.Equal(t, "ok", result.Message) + } +} + +func TestReportMissingValue(t *testing.T) { + git := adapters.NewMockGitClient() + git.SetHead("abc123", "") + env := adapters.NewMockEnvironment() + env.Set("DEEPSOURCE_DSN", "https://token@localhost:8080") + + svc := NewService(ServiceDeps{ + GitClient: git, + HTTPClient: &mockHTTPClient{DoFunc: func(req *http.Request) (*http.Response, error) { return nil, nil }}, + FileSystem: adapters.NewOSFileSystem(), + Environment: env, + Sentry: adapters.NewNoOpSentry(), + Workdir: func() (string, error) { + return "/tmp", nil + }, + }) + + _, err := svc.Report(context.Background(), Options{Analyzer: "test-coverage", Key: "python"}) + assert.Error(t, err) + assert.True(t, strings.Contains(err.Error(), "--value")) +} + +func httpResponse(code int, body string) *http.Response { + return &http.Response{ + StatusCode: code, + Body: io.NopCloser(bytes.NewBufferString(body)), + Header: make(http.Header), + } +} + +func TestReportJSONQueryPayload(t *testing.T) { + input := ReportQuery{Query: reportGraphqlQuery} + input.Variables.Input = ReportQueryInput{AccessToken: "token"} + payload, err := json.Marshal(input) + assert.NoError(t, err) + assert.True(t, strings.Contains(string(payload), "createArtifact")) +} diff --git a/internal/services/report/types.go b/internal/services/report/types.go new file mode 100644 index 00000000..5415ba37 --- 
/dev/null +++ b/internal/services/report/types.go @@ -0,0 +1,57 @@ +package report + +// ReportQueryInput is the schema for variables of artifacts report GraphQL query. +type ReportQueryInput struct { + AccessToken string `json:"accessToken"` + CommitOID string `json:"commitOid"` + ReporterName string `json:"reporter"` + ReporterVersion string `json:"reporterVersion"` + Key string `json:"key"` + Data string `json:"data"` + AnalyzerShortcode string `json:"analyzer"` + AnalyzerType string `json:"analyzerType,omitempty"` + Metadata interface{} `json:"metadata,omitempty"` +} + +// ReportQuery is the structure of artifacts report GraphQL query. +type ReportQuery struct { + Query string `json:"query"` + Variables struct { + Input ReportQueryInput `json:"input"` + } `json:"variables"` +} + +// QueryResponse is the response returned by artifacts report GraphQL query. +type QueryResponse struct { + Data struct { + CreateArtifact struct { + Error string `json:"error"` + Message string `json:"message"` + Ok bool `json:"ok"` + } `json:"createArtifact"` + } `json:"data"` +} + +// Options defines inputs for reporting artifacts. +type Options struct { + Analyzer string + AnalyzerType string + Key string + Value string + ValueFile string + SkipCertificateVerification bool + DSN string + UseOIDC bool + OIDCRequestToken string + OIDCRequestUrl string + DeepSourceHostEndpoint string + OIDCProvider string +} + +// Result captures report output metadata. +type Result struct { + Analyzer string + Key string + Message string + Warning string +} diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go new file mode 100644 index 00000000..b4c7c445 --- /dev/null +++ b/internal/testutil/testutil.go @@ -0,0 +1,83 @@ +package testutil + +import ( + "context" + "encoding/json" + "os" + "strings" + "testing" + "time" + + "github.com/deepsourcelabs/cli/config" + "github.com/deepsourcelabs/cli/deepsource/graphqlclient" + "github.com/deepsourcelabs/cli/internal/adapters" + "github.com/deepsourcelabs/cli/internal/secrets" +) + +// CreateTestConfigManager creates a config.Manager backed by a temp directory +// with a valid token, host, and user pre-written to config. +func CreateTestConfigManager(t *testing.T, token, host, user string) *config.Manager { + t.Helper() + + tmpDir := t.TempDir() + fs := adapters.NewOSFileSystem() + mgr := config.NewManagerWithSecrets(fs, func() (string, error) { + return tmpDir, nil + }, secrets.NoopStore{}, "") + + cfg := &config.CLIConfig{ + Token: token, + Host: host, + User: user, + TokenExpiresIn: time.Now().Add(24 * time.Hour), + } + if err := mgr.Write(cfg); err != nil { + t.Fatalf("failed to write test config: %v", err) + } + + return mgr +} + +// LoadGoldenFile reads a golden file from the given absolute path. +func LoadGoldenFile(t *testing.T, path string) []byte { + t.Helper() + + data, err := os.ReadFile(path) + if err != nil { + t.Fatalf("failed to read golden file %s: %v", path, err) + } + return data +} + +// MockQueryFunc creates a graphqlclient.MockClient with a QueryFunc that +// routes based on query content substrings and returns golden file responses. +// The routes map keys are substrings matched against the GraphQL query string, +// and values are absolute paths to golden JSON response files. 
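For example, a test wiring golden responses by query substring might look like the following sketch, using the helper exactly as described above; the fixture file names under testdata are hypothetical:

package example_test

import (
	"path/filepath"
	"testing"

	"github.com/deepsourcelabs/cli/internal/testutil"
)

func TestWiringGoldenResponses(t *testing.T) {
	// Hypothetical golden files; a real test would point at fixtures it ships.
	base, err := filepath.Abs(filepath.Join("testdata", "golden"))
	if err != nil {
		t.Fatal(err)
	}

	routes := map[string]string{
		// Any GraphQL query containing "viewer" gets the viewer golden response.
		"viewer":         filepath.Join(base, "viewer.json"),
		"createArtifact": filepath.Join(base, "create_artifact.json"),
	}

	mock := testutil.MockQueryFunc(t, routes)
	_ = mock // hand the mock GraphQL client to the service under test
}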
+func MockQueryFunc(t *testing.T, routes map[string]string) *graphqlclient.MockClient { + t.Helper() + + type route struct { + substring string + data []byte + } + loaded := make([]route, 0, len(routes)) + for substr, filePath := range routes { + data, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("failed to read golden response file %s: %v", filePath, err) + } + loaded = append(loaded, route{substring: substr, data: data}) + } + + mock := graphqlclient.NewMockClient() + mock.QueryFunc = func(ctx context.Context, query string, vars map[string]any, result any) error { + for _, r := range loaded { + if strings.Contains(query, r.substring) { + return json.Unmarshal(r.data, result) + } + } + t.Fatalf("no mock route matched query: %s", query) + return nil + } + return mock +} diff --git a/utils/remote_resolver.go b/internal/vcs/remote_resolver.go similarity index 93% rename from utils/remote_resolver.go rename to internal/vcs/remote_resolver.go index 5d138282..16f5a4a0 100644 --- a/utils/remote_resolver.go +++ b/internal/vcs/remote_resolver.go @@ -1,8 +1,10 @@ -package utils +package vcs import ( "fmt" "strings" + + "github.com/deepsourcelabs/cli/internal/cli/prompt" ) type RemoteData struct { @@ -54,7 +56,7 @@ func ResolveRemote(repoArg string) (*RemoteData, error) { promptOpts = append(promptOpts, value[3]) } - selectedRemote, err := SelectFromOptions("Please select the repository:", "", promptOpts) + selectedRemote, err := prompt.SelectFromOptions("Please select the repository:", "", promptOpts) if err != nil { return nil, err } diff --git a/utils/remote_resolver_test.go b/internal/vcs/remote_resolver_test.go similarity index 99% rename from utils/remote_resolver_test.go rename to internal/vcs/remote_resolver_test.go index 294d1d82..161d0404 100644 --- a/utils/remote_resolver_test.go +++ b/internal/vcs/remote_resolver_test.go @@ -1,4 +1,4 @@ -package utils +package vcs import ( "reflect" diff --git a/utils/fetch_remote.go b/internal/vcs/remotes.go similarity index 97% rename from utils/fetch_remote.go rename to internal/vcs/remotes.go index fb05cc8f..f385e359 100644 --- a/utils/fetch_remote.go +++ b/internal/vcs/remotes.go @@ -1,4 +1,4 @@ -package utils +package vcs import ( "fmt" @@ -83,7 +83,7 @@ func ListRemotes() (map[string][]string, error) { remoteList := strings.Split(string(remotes), "\n") if len(remoteList) <= 1 { - return remoteMap, fmt.Errorf("no remotes found") + return remoteMap, fmt.Errorf("No remotes found") } // Removing the last blank element diff --git a/utils/fetch_remote_test.go b/internal/vcs/remotes_test.go similarity index 99% rename from utils/fetch_remote_test.go rename to internal/vcs/remotes_test.go index 6eac1f38..d497cc4c 100644 --- a/utils/fetch_remote_test.go +++ b/internal/vcs/remotes_test.go @@ -1,4 +1,4 @@ -package utils +package vcs import ( "reflect" diff --git a/introspection.json b/introspection.json new file mode 100644 index 00000000..65413cc4 --- /dev/null +++ b/introspection.json @@ -0,0 +1,14421 @@ +{ + "data": { + "__schema": { + "queryType": { + "name": "Query" + }, + "mutationType": { + "name": "Mutation" + }, + "subscriptionType": null, + "types": [ + { + "kind": "OBJECT", + "name": "ComplianceReport", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + 
"type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "historicalValues", + "description": "The historical values for this report.", + "args": [ + { + "name": "startDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "HistoricalValueItem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "complianceIssueStats", + "description": "The compliance issue stats associated with this report.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ComplianceIssueStat", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Report", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "Report", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "historicalValues", + "description": "The historical values for this report.", + "args": [ + { + "name": "startDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + 
"kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "HistoricalValueItem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "ComplianceReport", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "InsightReport", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "IssueDistributionReport", + "ofType": null + } + ] + }, + { + "kind": "ENUM", + "name": "ReportKey", + "description": "All possible report keys.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "OWASP_TOP_10", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SANS_TOP_25", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MISRA_C", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CODE_COVERAGE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CODE_HEALTH_TREND", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ISSUE_DISTRIBUTION", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ISSUES_PREVENTED", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ISSUES_AUTOFIXED", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "String", + "description": "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Int", + "description": "The `Int` scalar type represents non-fractional signed whole numeric values. 
Int can represent values between -(2^31) and 2^31 - 1.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "ReportStatus", + "description": "The different statuses possible for a report.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "PASSING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FAILING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NOOP", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "HistoricalValueItem", + "description": null, + "fields": [ + { + "name": "date", + "description": "The date of the historical value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The values associated with this item.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "HistoricalValue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Date", + "description": "The `Date` scalar type represents a Date\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "HistoricalValue", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key associated with the value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "value", + "description": "The value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Trend", + "description": "Represents a trend for a report.", + "fields": [ + { + "name": "label", + "description": "The label associated with the trend.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "value", + "description": "The value of the trend.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "rate", + "description": "The percentage change in the trend.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": true, + "deprecationReason": "Deprecated in favor of `changePercentage`." 
+ }, + { + "name": "changePercentage", + "description": "The percentage change in the trend.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Float", + "description": "The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ComplianceIssueStat", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key for this stat.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title for this stat.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrence", + "description": "The occurrence count of the compliance issue.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "ComplianceIssueOccurrenceCount", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ComplianceIssueOccurrenceCount", + "description": null, + "fields": [ + { + "name": "critical", + "description": "The count of critical severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "major", + "description": "The count of major severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "minor", + "description": "The count of minor severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "total", + "description": "The total count of issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "InsightReport", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + }, + 
"isDeprecated": false, + "deprecationReason": null + }, + { + "name": "historicalValues", + "description": "The historical values for this report.", + "args": [ + { + "name": "startDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "HistoricalValueItem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Report", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Issue", + "description": null, + "fields": [ + { + "name": "shortcode", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analyzer", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "autofixAvailable", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "autofixAiAvailable", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isRecommended", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "category", + "description": "Category of the issue.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "severity", + "description": "Severity of the issue.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueSeverity", + "ofType": null + } + }, + "isDeprecated": 
false, + "deprecationReason": null + }, + { + "name": "description", + "description": "The description of the issue in markdown.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortDescription", + "description": "A short description of the issue.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "tags", + "description": "A list of tags associated with the issue.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "description": "Custom node class to prevent leaking primary keys as integers", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Metric", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "User", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Account", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "CodeCoverageReportRepository", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "AnalysisRun", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Check", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Occurrence", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RepositoryMetricItem", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "MetricValue", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RepositoryIssue", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "VulnerabilityOccurrence", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Vulnerability", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Package", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "PackageVersion", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RepositoryTarget", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "IgnoreRule", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "TeamMember", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "TeamSuppressedIssue", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Transformer", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Installation", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "CodeFormatter", + "ofType": null + } + ] + }, + { + "kind": "SCALAR", + "name": "ID", + "description": "The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. 
The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Analyzer", + "description": "A DeepSource Analyzer.", + "fields": [ + { + "name": "version", + "description": "Version of the image used for this analyzer.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "Unique identifier for this analyzer globally.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": "Human-friendly name for this analyzer.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "Verbose description, written in Markdown.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metaSchema", + "description": "Schema of the meta fields accepted by the analyzer in .deepsource.toml.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "JSONString", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "exampleConfig", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "logo", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "numIssues", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issues", + "description": null, + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + 
"defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "IssueConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issue", + "description": "Get a specific issue by its shortcode.", + "args": [ + { + "name": "shortcode", + "description": "Shortcode of the issue.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueDistribution", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueDistributionItem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "ENUM", + "name": "AnalyzerType", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "JSONString", + "description": "Allows use of a JSON String for input / output from the GraphQL schema.\n\nUse of this type is *not recommended* as you lose the benefits of having a defined, static\nschema (one of the key benefits of GraphQL).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "PageInfo", + "description": "The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.", + "fields": [ + { + "name": "hasNextPage", + "description": "When paginating forwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "hasPreviousPage", + "description": "When paginating backwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "startCursor", + "description": "When paginating backwards, the cursor to continue.", 
+ "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "endCursor", + "description": "When paginating forwards, the cursor to continue.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Boolean", + "description": "The `Boolean` scalar type represents `true` or `false`.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueEdge", + "description": "A Relay edge containing a `Issue` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueDistributionItem", + "description": null, + "fields": [ + { + "name": "category", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "count", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IssueCategory", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ANTI_PATTERN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BUG_RISK", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PERFORMANCE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SECURITY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "COVERAGE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TYPECHECK", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "STYLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DOCUMENTATION", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SECRETS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": 
null + }, + { + "kind": "ENUM", + "name": "AnalyzerType", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "CORE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "COMMUNITY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CUSTOM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IssueSeverity", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "CRITICAL", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MAJOR", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MINOR", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueDistributionReport", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + }, + "isDeprecated": true, + "deprecationReason": "Report doesn't have a status." + }, + { + "name": "historicalValues", + "description": "The historical values for this report.", + "args": [ + { + "name": "startDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "HistoricalValueItem", + "ofType": null + } + }, + "isDeprecated": true, + "deprecationReason": "Deprecated in favor of `values`." 
+ }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueDistributionByAnalyzer", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueDistribution", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueDistributionByCategory", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueDistribution", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Report", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ReportValueItem", + "description": "Represents the values recorded on a specific date.", + "fields": [ + { + "name": "date", + "description": "The date when the values were recorded.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The values recorded on the given date.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ReportValue", + "description": "Represents a value recorded for a report.", + "fields": [ + { + "name": "key", + "description": "The key associated with the value.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "value", + "description": "The value.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueDistribution", + "description": null, + "fields": [ + { + 
"name": "key", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "value", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Metric", + "description": "A metric tracked by an analyzer.", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": "The metric's name.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "The metric's unique identifier.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "The metric's description in markdown format.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "positiveDirection", + "description": "Direction which can be considered positive for the metric.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "Direction", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "unit", + "description": "Unit suffix to apply to the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "minValueAllowed", + "description": "Lower bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "maxValueAllowed", + "description": "Upper bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + }, + { + "kind": "INTERFACE", + "name": "MetricDefinition", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "MetricDefinition", + "description": "A metric's definition.", + "fields": [ + { + "name": "name", + "description": "The metric's name.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "The metric's unique identifier.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + 
} + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "The metric's description in markdown format.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "positiveDirection", + "description": "Direction which can be considered positive for the metric.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "Direction", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "unit", + "description": "Unit suffix to apply to the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "minValueAllowed", + "description": "Lower bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "maxValueAllowed", + "description": "Upper bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "Metric", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RepositoryMetric", + "ofType": null + } + ] + }, + { + "kind": "ENUM", + "name": "MetricShortcode", + "description": "Represents the various metric types.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "BCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DDP", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CPCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NLCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NBCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NCCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NCPCV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "Direction", + "description": "Represents the direction of a value.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "UPWARD", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DOWNWARD", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ChangesetStats", + "description": "Statistics pertaining to the changeset (of a commit or PR), as analyzed by an `AnalysisRun`.", + "fields": [ + { + "name": "lines", + "description": "Stats for number of lines in the changeset.", + 
"args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ChangesetStatsCounts", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "branches", + "description": "Stats for number of branches in the changeset.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ChangesetStatsCounts", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "conditions", + "description": "Stats for number of conditions in the changeset.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ChangesetStatsCounts", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ChangesetStatsCounts", + "description": "Overall and newly added number of lines (or branches or conditions) in a changeset.", + "fields": [ + { + "name": "overall", + "description": "\n Overall number of lines (or branches or conditions) across the repository.\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n ", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "overallCovered", + "description": "\n Overall number of lines (or branches or conditions) that are covered across the repository.\",\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n ", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "new", + "description": "Newly added number of lines (or branches or conditions) in the changeset.\nNote: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n ", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "newCovered", + "description": "\n Newly added number of lines (or branches or conditions) that are covered in the changeset.\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n ", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Query", + "description": null, + "fields": [ + { + "name": "viewer", + "description": "The currently authenticated user.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "User", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "transformer", + "description": "Lookup a transformer by its shortcode.", + "args": [ + { + "name": "shortcode", + "description": "Shortcode of the transformer.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Transformer", + "ofType": null + }, + 
"isDeprecated": true, + "deprecationReason": "Use `codeFormatter` instead" + }, + { + "name": "transformers", + "description": "List all transformers with optional filtering.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "name_Icontains", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "TransformerConnection", + "ofType": null + }, + "isDeprecated": true, + "deprecationReason": "Use `codeFormatters` instead" + }, + { + "name": "repository", + "description": "Lookup a repository on DeepSource using it's name and VCS provider.", + "args": [ + { + "name": "name", + "description": "The name of the repository to lookup.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "login", + "description": "The login or username of the account under which the repository exists.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "vcsProvider", + "description": "VCS Provider of the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "installation", + "description": "The DeepSource installation.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Installation", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "codeFormatter", + "description": "Lookup a code formatter by its shortcode.", + "args": [ + { + "name": "shortcode", + "description": "Shortcode of the code formatter.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "CodeFormatter", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "codeFormatters", + "description": "List all code formatters with optional filtering.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + 
"name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "name_Icontains", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "CodeFormatterConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analyzer", + "description": "Get an analyzer from its shortcode.", + "args": [ + { + "name": "shortcode", + "description": "Shortcode of the analyzer you'd like to get.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analyzers", + "description": "Get all analyzers available on DeepSource.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "AnalyzerConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "run", + "description": "Fetch an AnalysisRun object from it's UID or commit OID.", + "args": [ + { + "name": "runUid", + "description": "UID of the Analysis Run you want to get.", + "type": { + "kind": "SCALAR", + "name": "UUID", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "commitOid", + "description": "Commit OID of the Analysis Run you want to get.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AnalysisRun", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "account", + "description": "An account on DeepSource (individual or team). 
A user can add multiple accounts from multiple VCS providers.", + "args": [ + { + "name": "login", + "description": "The login or username to lookup the account by.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "vcsProvider", + "description": "VCS Provider of the account.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Account", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "node", + "description": null, + "args": [ + { + "name": "id", + "description": "The ID of the object", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "User", + "description": null, + "fields": [ + { + "name": "firstName", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "lastName", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "email", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "accounts", + "description": "All the accounts associated with the user. 
This includes the team accounts the user is part of and the individual accounts they have added on DeepSource.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "AccountConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analyticsId", + "description": "The anonymous ID used for analytics and tracking.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isBetaTester", + "description": "Whether the user is a beta tester.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ideSubscription", + "description": "The IDE subscription associated with the user.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "IDESubscription", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AccountConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AccountEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AccountEdge", + "description": "A Relay edge containing a `Account` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Account", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + 
"kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Account", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "login", + "description": "The unique identifier (or username) of the account.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": "The account type (individual or team).", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "AccountType", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "vcsProvider", + "description": "VCS Provider of the account.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isBetaTester", + "description": "Whether the account is a beta tester", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "avatarUrl", + "description": "URL for the account's public avatar.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "report", + "description": "Get a report associated with this account", + "args": [ + { + "name": "key", + "description": "Get the report associated with the report key", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INTERFACE", + "name": "Report", + "ofType": null + } + }, + "isDeprecated": true, + "deprecationReason": "Deprecated in favor of `reports`." 
+ }, + { + "name": "reports", + "description": "Namespace containing all available reports.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AccountReportsNamespace", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "vcsUrl", + "description": "URL for the account on the VCS Provider.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": "Get all repositories accessible to the current user under the given account.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryConnection", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "members", + "description": "Members of the team. This is an empty list for an individual account.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "TeamMemberConnection", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "subscription", + "description": "Subscription and billing details of the account.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AccountSubscription", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "suppressedIssues", + "description": "Suppressed issues on the account/team.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + 
"name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "issueShortcode", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "TeamSuppressedIssueConnection", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "AccountType", + "description": null, + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "INDIVIDUAL", + "description": "A individual account.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TEAM", + "description": "A team account.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "VCSProvider", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "GITHUB", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GITLAB", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BITBUCKET", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BITBUCKET_DATACENTER", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GITHUB_ENTERPRISE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GSR", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ADS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AccountReportsNamespace", + "description": "Namespace containing all the reports available for an `Account`", + "fields": [ + { + "name": "owaspTop10", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OwaspTop10Report", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sansTop25", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SansTop25Report", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "misraC", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "MisraCReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "codeCoverage", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CodeCoverageReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "codeHealthTrend", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": 
{ + "kind": "OBJECT", + "name": "CodeHealthTrendReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueDistribution", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueDistributionReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issuesPrevented", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssuesPreventedReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issuesAutofixed", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssuesAutofixedReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "OwaspTop10Report", + "description": "The OWASP Top 10 report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "securityIssueStats", + "description": "The compliance issue stats associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": 
null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SecurityIssueStat", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SecurityIssueStat", + "description": null, + "fields": [ + { + "name": "key", + "description": "The key for this stat.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title for this stat.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrence", + "description": "The severity distribution for this stat.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SeverityDistribution", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SeverityDistribution", + "description": "Distribution of severity count.", + "fields": [ + { + "name": "critical", + "description": "The count of critical severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "major", + "description": "The count of major severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "minor", + "description": "The count of minor severity issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "total", + "description": "The total count of issues.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SansTop25Report", + "description": "The SANS Top 25 report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + 
"ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "securityIssueStats", + "description": "The compliance issue stats associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SecurityIssueStat", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "MisraCReport", + "description": "The MISRA-C report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + 
"kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The status of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportStatus", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "securityIssueStats", + "description": "The compliance issue stats associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SecurityIssueStat", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeCoverageReport", + "description": "The Code Coverage report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": "The list of repositories.", + "args": [ + { + "name": "q", + "description": "The query string to search for repositories.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "sortKey", + "description": "The sort key to sort the repositories results by.", + "type": { + "kind": "ENUM", + "name": "CodeCoverageReportRepositorySortKey", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "CodeCoverageReportRepositoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeCoverageReportRepositoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", 
+ "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CodeCoverageReportRepositoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeCoverageReportRepositoryEdge", + "description": "A Relay edge containing a `CodeCoverageReportRepository` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "CodeCoverageReportRepository", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeCoverageReportRepository", + "description": "Representation of a `Repository` in the Code Coverage report.", + "fields": [ + { + "name": "name", + "description": "The name of this repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "lcvMetricValue", + "description": "The LCV metric value for this repository.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "bcvMetricValue", + "description": "The BCV metric value for this repository.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isLcvPassing", + "description": "Whether the LCV value is passing.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isBcvPassing", + "description": "Whether the BCV value is passing.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "CodeCoverageReportRepositorySortKey", + "description": "Possible options to sort the list of repositories in the Code Coverage report.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "LCV_ASCENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + 
"name": "LCV_DESCENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BCV_ASCENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BCV_DESCENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeHealthTrendReport", + "description": "The Code Health Trend report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssuesPreventedReport", + "description": "The Issues Prevented report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to 
get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssuesAutofixedReport", + "description": "The Issues Autofixed report.", + "fields": [ + { + "name": "key", + "description": "The key of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "The title of the report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "currentValue", + "description": "The current value of the report.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "The report values for this report.", + "args": [ + { + "name": "startDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "endDate", + "description": "The start date to get the report values.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Date", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ReportValueItem", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "trends", + "description": "The trends associated with this report.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Trend", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": 
"PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryEdge", + "description": "A Relay edge containing a `Repository` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Repository", + "description": null, + "fields": [ + { + "name": "name", + "description": "The name of this repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "latestCommitOid", + "description": "Object ID of the latest commit on the default branch.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isPrivate", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isActivated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "account", + "description": "The account under which this repository exists.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Account", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analysisRuns", + "description": "Past analysis runs for the repository", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": 
"after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "AnalysisRunConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "configJson", + "description": "The `.deepsource.toml` config of the repository represented as a JSON object.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "JSON", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "defaultBranch", + "description": "The default base branch of the repository on DeepSource.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dsn", + "description": "The DSN for this repository.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "enabledAnalyzers", + "description": "Get all the analyzers enabled in this repository.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "AnalyzerConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issues", + "description": "Get all issues raised in the default branch of this repository. 
Specifying a path would only return those issues whose occurrences are present in the file at path.", + "args": [ + { + "name": "path", + "description": "Show issues for this path only.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "analyzerIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RepositoryIssueConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueOccurrences", + "description": "All issue occurrences in the default branch.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "analyzerIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "OccurrenceConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dependencyVulnerabilityOccurrences", + "description": "List of dependency vulnerability occurrences in the default branch.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + 
"description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "VulnerabilityOccurrenceConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dependencyVulnerabilityOccurrence", + "description": "Get a dependency vulnerability occurrence by its ID.", + "args": [ + { + "name": "id", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "VulnerabilityOccurrence", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "target", + "description": "Get a specific repository target.", + "args": [ + { + "name": "id", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryTarget", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "targets", + "description": "List of repository targets for this repository.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryTargetConnection", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "report", + "description": "Get a report associated with this repository", + "args": [ + { + "name": "key", + "description": "Get the report associated with the report key", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "ReportKey", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INTERFACE", + "name": "Report", + "ofType": null + } + }, + "isDeprecated": true, + "deprecationReason": "Deprecated in favor of `reports`." 
+ }, + { + "name": "reports", + "description": "Namespace containing all available reports.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryReportsNamespace", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "vcsProvider", + "description": "VCS Provider of the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "vcsUrl", + "description": "URL of the repository on the VCS.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metrics", + "description": "List of all DeepSource metrics.", + "args": [ + { + "name": "shortcodeIn", + "description": "List of metric shortcodes to filter on.", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryMetric", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ignoreRules", + "description": "List of `IgnoreRule`s that exist for the repository.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "issueShortcode", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "filePath", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "IgnoreRuleConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueCategorySettings", + "description": "Issue categories configuration for the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueCategorySetting", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issuePrioritySettings", + "description": "Issue priority configuration for the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { 
+ "kind": "OBJECT", + "name": "IssuePrioritySetting", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metricSettings", + "description": "Metric settings for the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "MetricSetting", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "allowAutofixAi", + "description": "Whether the account has allowed Autofix AI to run on private repositories.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "useLegacyAutofix", + "description": "Whether to use the legacy autofix engine.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalysisRunConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AnalysisRunEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalysisRunEdge", + "description": "A Relay edge containing a `AnalysisRun` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "AnalysisRun", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalysisRun", + "description": null, + "fields": [ + { + "name": "createdAt", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "branchName", + "description": null, + "args": [], + "type": { + "kind": 
"SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "baseOid", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "commitOid", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "finishedAt", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "runUid", + "description": "UID of this AnalysisRun.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "UUID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The current status of the run.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "AnalysisRunStatus", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "summary", + "description": "Summary of the analysis run", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AnalysisRunSummary", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updatedAt", + "description": "Time when the analysis run was last modified", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "checks", + "description": "Analyzer checks in the analysis run.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "analyzerIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "CheckConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"changesetStats", + "description": "Statistics pertaining to the changeset (of a commit or PR) in the analysis run.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "ChangesetStats", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "DateTime", + "description": "The `DateTime` scalar type represents a DateTime\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "UUID", + "description": "Leverages the internal Python implementation of UUID (uuid.UUID) to provide native UUID objects\nin fields, resolvers and input.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "AnalysisRunStatus", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "PENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SUCCESS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FAILURE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TIMEOUT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CANCEL", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "READY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SKIPPED", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalysisRunSummary", + "description": null, + "fields": [ + { + "name": "occurrencesIntroduced", + "description": "Number of issues introduced during this analysis run", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrencesResolved", + "description": "Number of issues marked as resolved in this analysis run", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrencesSuppressed", + "description": "Number of issues marked as suppressed in this analysis run", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrenceDistributionByAnalyzer", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OccurrenceDistributionByAnalyzer", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrenceDistributionByCategory", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OccurrenceDistributionByCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": 
null + }, + { + "kind": "OBJECT", + "name": "OccurrenceDistributionByAnalyzer", + "description": null, + "fields": [ + { + "name": "analyzerShortcode", + "description": "Shortcode of the analyzer", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "introduced", + "description": "Number of issues detected by the analyzer", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "OccurrenceDistributionByCategory", + "description": null, + "fields": [ + { + "name": "category", + "description": "Category of the issue", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "introduced", + "description": "Number of issues detected that belong to this category", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CheckConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CheckEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CheckEdge", + "description": "A Relay edge containing a `Check` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Check", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Check", + "description": "A single analyzer check as part of an analysis run.", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": 
null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sequence", + "description": "Sequence number of the check in the analysis run it belongs to.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "status", + "description": "The current status of the check.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "CheckStatus", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "analyzer", + "description": "The analyzer related to the check.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createdAt", + "description": "Time when the check was created.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updatedAt", + "description": "Time when the check was last modified.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "finishedAt", + "description": "Time when the check finished.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "summary", + "description": "Summary of the check.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CheckSummary", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrences", + "description": "Issue occurrences found in the check.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "analyzerIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OccurrenceConnection", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metrics", + "description": "List of DeepSource metrics captured in the check.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { 
+ "kind": "OBJECT", + "name": "RepositoryMetric", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "CheckStatus", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "WAITING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PENDING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SUCCESS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FAILURE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TIMEOUT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CANCEL", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "READY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NEUTRAL", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ARTIFACT_TIMEOUT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SKIPPED", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CheckSummary", + "description": "Summary of a check.", + "fields": [ + { + "name": "occurrencesIntroduced", + "description": "Number of issues introduced in the check.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrencesResolved", + "description": "Number of issues resolved in the check.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrencesSuppressed", + "description": "Number of issues marked as suppressed in the check.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrenceDistributionByCategory", + "description": "The issue category distribution for the check.", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OccurrenceDistributionByCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "OccurrenceConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OccurrenceEdge", + "ofType": null + } + } + }, + "isDeprecated": 
false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "OccurrenceEdge", + "description": "A Relay edge containing a `Occurrence` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Occurrence", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Occurrence", + "description": null, + "fields": [ + { + "name": "path", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "beginLine", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "beginColumn", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "endLine", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "endColumn", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issue", + "description": "The definition of the issue which has been raised.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": "Title describing the issue which has been raised here.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryMetric", + "description": "A Metric's manifestation specific to a 
repository.", + "fields": [ + { + "name": "name", + "description": "The metric's name.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "The metric's unique identifier.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "The metric's description in markdown format.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "positiveDirection", + "description": "Direction which can be considered positive for the metric.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "Direction", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "unit", + "description": "Unit suffix to apply to the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "minValueAllowed", + "description": "Lower bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "maxValueAllowed", + "description": "Upper bound for the metric value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isReported", + "description": "Whether this metric is enabled for reporting in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isThresholdEnforced", + "description": "Whether to fail checks when thresholds are not met for the metric in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "items", + "description": "Items in the repository metric.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryMetricItem", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MetricDefinition", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryMetricItem", + "description": "An item in the `RepositoryMetric`.", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "key", + "description": "Distinct 
key representing the metric in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricKey", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "threshold", + "description": "Threshold value for the metric, customizable by the user. Null if no threshold is set.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "latestValue", + "description": "Latest value captured for this metric on the repository's default branch.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "latestValueDisplay", + "description": "Latest value captured for this metric on the repository's default branch. Suffixed with the unit and returned as a human-readable string.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "thresholdStatus", + "description": "The status of the threshold condition for the latest metric value on the repository's default branch.", + "args": [], + "type": { + "kind": "ENUM", + "name": "MetricThresholdStatus", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "values", + "description": "All values captured for this metric in the repository's default branch.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "commitOidIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "MetricValueConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "MetricKey", + "description": "Represents the key for which the metric is recorded in a repository.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "AGGREGATE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "C_AND_CPP", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CSHARP", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "JAVA", + "description": null, + "isDeprecated": 
false, + "deprecationReason": null + }, + { + "name": "JAVASCRIPT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PHP", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PYTHON", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "RUBY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "RUST", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCALA", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "KOTLIN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SWIFT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "MetricThresholdStatus", + "description": "Represents the status of the threshold condition for a particular metric value.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "PASSING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FAILING", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "MetricValueConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "MetricValueEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "MetricValueEdge", + "description": "A Relay edge containing a `MetricValue` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "MetricValue", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "MetricValue", + "description": "An individual value captured for a RepositoryMetric.", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + 
"name": "value", + "description": "Metric value reported by the analyzer.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "valueDisplay", + "description": "Value suffixed with the unit of the metric.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "threshold", + "description": "Threshold value for the metric when this value was reported. Null if no threshold was set.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "thresholdStatus", + "description": "The status of the threshold condition for the metric value.", + "args": [], + "type": { + "kind": "ENUM", + "name": "MetricThresholdStatus", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "commitOid", + "description": "Commit SHA for which this value was recorded on the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createdAt", + "description": "The time at which the value was captured.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "JSON", + "description": "A JSON object.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalyzerConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "AnalyzerEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AnalyzerEdge", + "description": "A Relay edge containing a `Analyzer` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + 
"args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryIssueConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryIssueEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryIssueEdge", + "description": "A Relay edge containing a `RepositoryIssue` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RepositoryIssue", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryIssue", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issue", + "description": "Definition of the issue that has been raised.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "occurrences", + "description": "All occurrences of this issue in the default branch.", + "args": [ + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + 
"defaultValue": null + }, + { + "name": "analyzerIn", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "OccurrenceConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": "The repository for which this issue has been raised.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "VulnerabilityOccurrenceConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "VulnerabilityOccurrenceEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "VulnerabilityOccurrenceEdge", + "description": "A Relay edge containing a `VulnerabilityOccurrence` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "VulnerabilityOccurrence", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "VulnerabilityOccurrence", + "description": null, + "fields": [ + { + "name": "reachability", + "description": "The reachability of the vulnerability occurrence.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VulnerabilityOccurrenceReachability", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fixability", + "description": "The fixability of the vulnerability occurrence.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VulnerabilityOccurrenceFixability", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "vulnerability", + "description": "The vulnerability.", + "args": [], + "type": { + "kind": 
"NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Vulnerability", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "package", + "description": "The package associated with the vulnerability occurrence.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Package", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "packageVersion", + "description": "The package version associated with the vulnerability occurrence.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PackageVersion", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "VulnerabilityOccurrenceReachability", + "description": "\n The reachability type of the vulnerability occurrence\n ", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "REACHABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNREACHABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNKNOWN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "VulnerabilityOccurrenceFixability", + "description": "\n The fixability type of the vulnerability occurrence\n ", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ERROR", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNFIXABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GENERATING_FIX", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "POSSIBLY_FIXABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MANUALLY_FIXABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "AUTO_FIXABLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Vulnerability", + "description": null, + "fields": [ + { + "name": "cvssV2Vector", + "description": "CVSS v2 vector", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV2BaseScore", + "description": "CVSS v2 base score", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV2Severity", + "description": "Severity based on the CVSSv2 base score.", + "args": [], + "type": { + "kind": "ENUM", + "name": "VulnerabilitySeverity", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"cvssV3Vector", + "description": "CVSS v3 vector", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV3BaseScore", + "description": "CVSS v3 base score", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV3Severity", + "description": "Severity based on the CVSSv3 base score.", + "args": [], + "type": { + "kind": "ENUM", + "name": "VulnerabilitySeverity", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV4Vector", + "description": "CVSS v4 vector", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV4BaseScore", + "description": "CVSS v4 base score", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cvssV4Severity", + "description": "Severity based on the CVSSv4 base score.", + "args": [], + "type": { + "kind": "ENUM", + "name": "VulnerabilitySeverity", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "severity", + "description": "Overall implied severity.", + "args": [], + "type": { + "kind": "ENUM", + "name": "VulnerabilitySeverity", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "identifier", + "description": "The identifier of the vulnerability", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "aliases", + "description": "The aliases of the vulnerability", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "summary", + "description": "The summary of the vulnerability", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "details", + "description": "The details of the vulnerability", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "publishedAt", + "description": "The time when the vulnerability was published", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updatedAt", + "description": "The time when the vulnerability was updated", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "withdrawnAt", + "description": "The time when the vulnerability was withdrawn", + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"epssScore", + "description": "The EPSS score of the vulnerability", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "epssPercentile", + "description": "The EPSS percentile of the vulnerability", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "introducedVersions", + "description": "Introduced versions.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fixedVersions", + "description": "Fixed versions.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "referenceUrls", + "description": "Reference URLs.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "VulnerabilitySeverity", + "description": "The severity of the vulnerability.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "NONE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LOW", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MEDIUM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HIGH", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CRITICAL", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Package", + "description": null, + "fields": [ + { + "name": "ecosystem", + "description": "The ecosystem of the package.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "Ecosystem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": "Name of the package", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "purl", + "description": "The package URL", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + 
"name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "Ecosystem", + "description": null, + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "NPM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PYPI", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MAVEN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "RUBYGEMS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NUGET", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PACKAGIST", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CRATES_IO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "PackageVersion", + "description": null, + "fields": [ + { + "name": "version", + "description": "Version of the package", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "versionType", + "description": "The type of the package version.", + "args": [], + "type": { + "kind": "ENUM", + "name": "PackageVersionType", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "PackageVersionType", + "description": "The type of the package version.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "SEMVER", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ECOSYSTEM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GIT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryTarget", + "description": "A SCA target in a repository.", + "fields": [ + { + "name": "ecosystem", + "description": "The ecosystem of the target.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "Ecosystem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "packageManager", + "description": "The package manager of the target.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "PackageManager", + "ofType": null + } + }, 
+ "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "manifestPath", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "lockfilePath", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "source", + "description": "The source of the target.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RepositoryTargetSource", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isActivated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "PackageManager", + "description": "\n Package managers supported by DeepSource\n ", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "REQUIREMENTS_TXT", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "POETRY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PIPFILE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PDM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UV", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NPM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "YARN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PNPM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BUN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GRADLE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MAVEN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "GO_MOD", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "RUBY_GEMS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NUGET", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PACKAGIST", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CARGO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RepositoryTargetSource", + "description": "\n The source of the target.\n ", + "fields": null, + "inputFields": null, + 
"interfaces": null, + "enumValues": [ + { + "name": "AUTO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CUSTOM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryTargetConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RepositoryTargetEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryTargetEdge", + "description": "A Relay edge containing a `RepositoryTarget` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RepositoryTarget", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RepositoryReportsNamespace", + "description": "Namespace containing all the reports available for an `Repository`", + "fields": [ + { + "name": "owaspTop10", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "OwaspTop10Report", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sansTop25", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SansTop25Report", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "misraC", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "MisraCReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "codeHealthTrend", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CodeHealthTrendReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issueDistribution", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssueDistributionReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": 
null + }, + { + "name": "issuesPrevented", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssuesPreventedReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issuesAutofixed", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IssuesAutofixedReport", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IgnoreRuleConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "IgnoreRuleEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IgnoreRuleEdge", + "description": "A Relay edge containing a `IgnoreRule` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "IgnoreRule", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IgnoreRule", + "description": "An `IgnoreRule` defines the condition on which to suppress an `Issue`'s `Occurrence`s in a `Repository`.", + "fields": [ + { + "name": "createdAt", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "level", + "description": "Ignore level of the rule.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IgnoreRuleLevel", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": "Ignore type of the rule.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + 
"name": "IgnoreRuleType", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issue", + "description": "The `Issue` to ignore in the rule.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "filePath", + "description": "File path if rule is on `FILE` level.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "globPattern", + "description": "Glob pattern if rule is of `PATTERN` type.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IgnoreRuleLevel", + "description": "Represents the level of an `IgnoreRule`.\n- `REPOSITORY`: suppress the issue for all files in the repository.\n- `FILE`: suppress the issue for the given file path in the repository.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "REPOSITORY", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FILE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IgnoreRuleType", + "description": "Represents the type of an `IgnoreRule`.\n- `FOREVER`: suppress the issue in the repository always.\n- `PATTERN`: suppress the issue occurrences matching the given glob pattern in the repository.\n- `TEST_PATTERN`: suppress the issue occurrences matching the repository's specified test patterns in the repository.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "FOREVER", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PATTERN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TEST_PATTERN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssueCategorySetting", + "description": "Configuration for an `IssueCategory` in a `Repository`. 
Also known as Quality Gates.", + "fields": [ + { + "name": "category", + "description": "An `IssueCategory`.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isReported", + "description": "Whether issues of given category are enabled for reporting in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "canFailCheck", + "description": "Whether to fail checks when occurrence(s) of issues of given category are found in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IssuePrioritySetting", + "description": "Configuration for an `IssuePriorityType` in a `Repository`.", + "fields": [ + { + "name": "priorityType", + "description": "A `IssuePriority`.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssuePriorityType", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isReported", + "description": "Whether issues of given priority are enabled for reporting in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "canFailCheck", + "description": "Whether to fail checks when occurrence(s) of issues of given priority are found in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IssuePriorityType", + "description": "Enum for issue priority type.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "LOW", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MEDIUM", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HIGH", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "MetricSetting", + "description": "Configuration for a `Metric` in a `Repository`. 
Also known as Quality Gates.", + "fields": [ + { + "name": "metricShortcode", + "description": "The metric's unique identifier.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isReported", + "description": "Whether the metric is enabled for reporting in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isThresholdEnforced", + "description": "Whether to fail checks when the metric's thresholds are not met in the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamMemberConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "TeamMemberEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamMemberEdge", + "description": "A Relay edge containing a `TeamMember` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "TeamMember", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamMember", + "description": "Represents a user within a team.", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": "The User instance.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "User", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "role", + "description": "The role this user has in 
the team.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "TeamMemberRole", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isOwner", + "description": "Whether this user is the owner of the team.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "joinedAt", + "description": "The time when this user joined the team.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "TeamMemberRole", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ADMIN", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MEMBER", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CONTRIBUTOR", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "AccountSubscription", + "description": "Subscription and billing details of an `Account`.", + "fields": [ + { + "name": "plan", + "description": "The plan associated with this account's subscription.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "AccountSubscriptionPlan", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "AccountSubscriptionPlan", + "description": "Represents DeepSource's subscription plans.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "FREE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "STARTER", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "BUSINESS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENTERPRISE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamSuppressedIssueConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "TeamSuppressedIssueEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": 
"SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamSuppressedIssueEdge", + "description": "A Relay edge containing a `TeamSuppressedIssue` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "TeamSuppressedIssue", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TeamSuppressedIssue", + "description": "A `TeamSuppressedIssue` represents an issue from an analyzer that has been suppressed on the team level. This is\na global suppression that affects all repositories in the team.", + "fields": [ + { + "name": "createdAt", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "issue", + "description": "The `Issue` that is suppressed.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Issue", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": "The user who suppressed the issue.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "User", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "IDESubscription", + "description": "Subscription details of an DeepSource IDE subscription.", + "fields": [ + { + "name": "plan", + "description": "The plan of this subscription.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IDESubscriptionPlan", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "IDESubscriptionPlan", + "description": "Represents DeepSource's IDE subscription plans.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "FREE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "PRO", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SPONSORED", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": 
"Transformer", + "description": "A transformer on DeepSource.", + "fields": [ + { + "name": "analyzer", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "exampleConfig", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": "Name of the tool.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "Unique identifier for this tool globally.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "Verbose description, written in Markdown.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "logo", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TransformerConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "TransformerEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "TransformerEdge", + "description": "A Relay edge containing a `Transformer` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Transformer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } 
+ }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Installation", + "description": "The DeepSource installation.", + "fields": [ + { + "name": "name", + "description": "The name of the installation.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "logo", + "description": "The logo URL of the installation.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeFormatter", + "description": "A code formatter on DeepSource.", + "fields": [ + { + "name": "analyzer", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "Analyzer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "exampleConfig", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": "Name of the tool.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "shortcode", + "description": "Unique identifier for this tool globally.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": "Verbose description, written in Markdown.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "logo", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "MaskPrimaryKeyNode", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeFormatterConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": 
"PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "CodeFormatterEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "CodeFormatterEdge", + "description": "A Relay edge containing a `CodeFormatter` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "CodeFormatter", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Mutation", + "description": null, + "fields": [ + { + "name": "suppressIssueForTeam", + "description": "Suppress an issue from an Analyzer on the team level, affecting all repositories.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "SuppressIssueForTeamInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "SuppressIssueForTeamPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "unsuppressIssueForTeam", + "description": "Remove a suppressed issue from an Analyzer on the team level, affecting all repositories.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "UnsuppressIssueForTeamInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "UnsuppressIssueForTeamPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "setRepositoryMetricThreshold", + "description": "Update the threshold for a metric in a repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "SetRepositoryMetricThresholdInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "SetRepositoryMetricThresholdPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "regenerateRepositoryDSN", + "description": "Regenerate a repository's DSN.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "RegenerateRepositoryDSNInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": 
"OBJECT", + "name": "RegenerateRepositoryDSNPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateRepositoryDefaultBranch", + "description": "Update a repository's default branch for baseline. If the repository is activated, this action will trigger a new analysis. Only available to users with `WRITE` permission on the repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryDefaultBranchInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "UpdateRepositoryDefaultBranchPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateRepositoryIssueCategorySetting", + "description": "Update the configuration for an issue category in a repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryIssueCategorySettingInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "UpdateRepositoryIssueCategorySettingPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateRepositoryIssuePrioritySetting", + "description": "Update the configuration for an issue priority in a repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryIssuePrioritySettingInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "UpdateRepositoryIssuePrioritySettingPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateRepositoryMetricSetting", + "description": "Update the configuration for a metric in a repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryMetricSettingInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "UpdateRepositoryMetricSettingPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "activateRepository", + "description": "Activate a repository. Only available to users with `WRITE` permission on the repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "ActivateRepositoryInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "ActivateRepositoryPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deactivateRepository", + "description": "Deactivate a repository. 
Only available to users with `WRITE` permission on the repository.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "DeactivateRepositoryInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "DeactivateRepositoryPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "revokeToken", + "description": null, + "args": [ + { + "name": "refreshToken", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Revoke", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "verifyToken", + "description": null, + "args": [ + { + "name": "token", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Verify", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "refreshToken", + "description": null, + "args": [ + { + "name": "refreshToken", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Refresh", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "registerDevice", + "description": "Generates a unique device verification code and an end-user code that are\nvalid for a limited time.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "RegisterDeviceInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RegisterDevicePayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "requestPatWithDeviceCode", + "description": "Validates the device code provided and responds with the user's PAT.\nIf there's no PAT associated with the user, create one. If the user\nhas no access, return an error. 
Otherwise, indicate the client to keep\npolling.", + "args": [ + { + "name": "input", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "RequestPATWithDeviceCodeInput", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RequestPATWithDeviceCodePayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "refreshPat", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "RefreshPAT", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "revokePat", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "RevokePAT", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SuppressIssueForTeamPayload", + "description": "Suppress an issue from an Analyzer on the team level, affecting all repositories.", + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "SuppressIssueForTeamInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "issueShortcode", + "description": "The issue's shortcode.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "login", + "description": "The login or username of the account/team.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "vcsProvider", + "description": "The VCS provider of the account/team.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UnsuppressIssueForTeamPayload", + "description": "Remove a suppressed issue from an Analyzer on the team level, affecting all repositories.", + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UnsuppressIssueForTeamInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": 
"issueShortcode", + "description": "The issue's shortcode.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "login", + "description": "The login or username of the account/team.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "vcsProvider", + "description": "The VCS provider of the account/team.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "VCSProvider", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SetRepositoryMetricThresholdPayload", + "description": "Update the threshold for a metric in a repository.", + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "SetRepositoryMetricThresholdInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "GraphQL node ID of the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "metricShortcode", + "description": "Metric shortcode to update the threshold for.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "metricKey", + "description": "The key of the metric you want to update the threshold for.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricKey", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "thresholdValue", + "description": "Threshold value to set. 
Can be null.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RegenerateRepositoryDSNPayload", + "description": null, + "fields": [ + { + "name": "dsn", + "description": "The new DSN for the repository.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "RegenerateRepositoryDSNInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "GraphQL node ID of the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UpdateRepositoryDefaultBranchPayload", + "description": null, + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Repository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryDefaultBranchInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "id", + "description": "GraphQL node ID of the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "defaultBranchName", + "description": "Default branch for analysis on the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UpdateRepositoryIssueCategorySettingPayload", + "description": null, + "fields": [ + { + "name": "ok", 
+ "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryIssueCategorySettingInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "The repository's ID.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "issueCategory", + "description": "The issue category you want to update.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssueCategory", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "isReported", + "description": "Whether issues of given category are enabled for reporting in the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "canFailCheck", + "description": "Whether to fail checks when occurrence(s) of issues of given category are found in the repository. An issue category can only be marked to fail a check if it is enabled for reporting.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UpdateRepositoryIssuePrioritySettingPayload", + "description": null, + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryIssuePrioritySettingInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "The repository's ID.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "issuePriorityType", + "description": "The issue priority you want to update.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "IssuePriorityType", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "isReported", + "description": "Whether issues of given priority are enabled for reporting in the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + 
"kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "canFailCheck", + "description": "Whether to fail checks when occurrence(s) of issues of given priority are found in the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UpdateRepositoryMetricSettingPayload", + "description": null, + "fields": [ + { + "name": "ok", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UpdateRepositoryMetricSettingInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "The repository's ID.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "metricShortcode", + "description": "The metric to update.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "MetricShortcode", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "isReported", + "description": "Whether the metric is enabled for reporting in the repository.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "isThresholdEnforced", + "description": "Whether to fail checks when the metric does not meet the threshold.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ActivateRepositoryPayload", + "description": null, + "fields": [ + { + "name": "ok", + "description": "Whether the repository has been activated successfully", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "ActivateRepositoryInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + 
"description": "The repository's ID.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "DeactivateRepositoryPayload", + "description": null, + "fields": [ + { + "name": "ok", + "description": "Whether the repository has been deactivated successfully", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "DeactivateRepositoryInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "repositoryId", + "description": "The repository's ID.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Revoke", + "description": null, + "fields": [ + { + "name": "revoked", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Verify", + "description": null, + "fields": [ + { + "name": "payload", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "GenericScalar", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "GenericScalar", + "description": "The `GenericScalar` scalar type represents a generic\nGraphQL scalar value that could be:\nString, Boolean, Int, Float, List or Object.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Refresh", + "description": null, + "fields": [ + { + "name": "payload", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "GenericScalar", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "refreshExpiresIn", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "token", + "description": null, + 
"args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "refreshToken", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RegisterDevicePayload", + "description": "Generates a unique device verification code and an end-user code that are\nvalid for a limited time.", + "fields": [ + { + "name": "deviceCode", + "description": "The device verification code.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "userCode", + "description": "The end-user verification code.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "verificationUri", + "description": "The end-user verification URI.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "verificationUriComplete", + "description": "A verification URI that includes the 'user_code'.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "expiresIn", + "description": "The lifetime in seconds of the 'device_code' and 'user_code'.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "interval", + "description": "The minimum amount of time in seconds that the client should wait between polling requests to the token endpoint.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "RegisterDeviceInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "deviceType", + "description": null, + "type": { + "kind": "ENUM", + "name": "DeviceType", + "ofType": null + }, + "defaultValue": "CLI" + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "DeviceType", + "description": "The device type that is being registered.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "CLI", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "IDE", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RequestPATWithDeviceCodePayload", + 
"description": "Validates the device code provided and responds with the user's PAT.\nIf there's no PAT associated with the user, create one. If the user\nhas no access, return an error. Otherwise, indicate the client to keep\npolling.", + "fields": [ + { + "name": "token", + "description": "The personal access token corresponding to the device_code", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "expiry", + "description": "Expiry of the token, in unix time", + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "User", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "RequestPATWithDeviceCodeInput", + "description": null, + "fields": null, + "inputFields": [ + { + "name": "deviceCode", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "description", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "clientMutationId", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RefreshPAT", + "description": null, + "fields": [ + { + "name": "token", + "description": "The personal access token corresponding to the device_code", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "expiry", + "description": "Expiry of the token, in unix time", + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "User", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RevokePAT", + "description": null, + "fields": [ + { + "name": "ok", + "description": "Indication whether revoking of personal access token was successful", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Schema", + "description": "A GraphQL Schema defines the capabilities of a GraphQL server. 
It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "fields": [ + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "types", + "description": "A list of all types supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "queryType", + "description": "The type that query operations will be rooted at.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "mutationType", + "description": "If this server supports mutation, the type that mutation operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "subscriptionType", + "description": "If this server support subscription, the type that subscription operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "directives", + "description": "A list of all directives supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Directive", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Type", + "description": "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name, description and optional `specifiedByURL`, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. 
List and NonNull types compose other types.", + "fields": [ + { + "name": "kind", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__TypeKind", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "specifiedByURL", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fields", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Field", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "interfaces", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "possibleTypes", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "enumValues", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__EnumValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "inputFields", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ofType", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__TypeKind", + "description": "An enum describing what kind of type a given `__Type` is.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "SCALAR", + "description": "Indicates this type is a scalar.", + "isDeprecated": false, + "deprecationReason": 
null + }, + { + "name": "OBJECT", + "description": "Indicates this type is an object. `fields` and `interfaces` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Indicates this type is an interface. `fields`, `interfaces`, and `possibleTypes` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Indicates this type is a union. `possibleTypes` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Indicates this type is an enum. `enumValues` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Indicates this type is an input object. `inputFields` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LIST", + "description": "Indicates this type is a list. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NON_NULL", + "description": "Indicates this type is a non-null. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Field", + "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__InputValue", + "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", + "fields": [ + { + "name": "name", + "description": 
null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "defaultValue", + "description": "A GraphQL-formatted string representing the default value for this input value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__EnumValue", + "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Directive", + "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isRepeatable", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "locations", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__DirectiveLocation", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__DirectiveLocation", + "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "QUERY", + "description": "Location adjacent to a query operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MUTATION", + "description": "Location adjacent to a mutation operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SUBSCRIPTION", + "description": "Location adjacent to a subscription operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD", + "description": "Location adjacent to a field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_DEFINITION", + "description": "Location adjacent to a fragment definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_SPREAD", + "description": "Location adjacent to a fragment spread.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INLINE_FRAGMENT", + "description": "Location adjacent to an inline fragment.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "VARIABLE_DEFINITION", + "description": "Location adjacent to a variable definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCHEMA", + "description": "Location adjacent to a schema definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCALAR", + "description": "Location adjacent to a scalar definition.", + "isDeprecated": false, + "deprecationReason": null 
+ }, + { + "name": "OBJECT", + "description": "Location adjacent to an object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD_DEFINITION", + "description": "Location adjacent to a field definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ARGUMENT_DEFINITION", + "description": "Location adjacent to an argument definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Location adjacent to an interface definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Location adjacent to a union definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Location adjacent to an enum definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM_VALUE", + "description": "Location adjacent to an enum value definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Location adjacent to an input object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_FIELD_DEFINITION", + "description": "Location adjacent to an input object field definition.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + } + ], + "directives": [ + { + "name": "include", + "description": "Directs the executor to include this field or fragment only when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Included when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "skip", + "description": "Directs the executor to skip this field or fragment when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Skipped when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "deprecated", + "description": "Marks an element of a GraphQL schema as no longer supported.", + "locations": [ + "FIELD_DEFINITION", + "ARGUMENT_DEFINITION", + "INPUT_FIELD_DEFINITION", + "ENUM_VALUE" + ], + "args": [ + { + "name": "reason", + "description": "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. 
Formatted using the Markdown syntax, as specified by [CommonMark](https://commonmark.org/).", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": "\"No longer supported\"" + } + ] + }, + { + "name": "specifiedBy", + "description": "Exposes a URL that specifies the behavior of this scalar.", + "locations": [ + "SCALAR" + ], + "args": [ + { + "name": "url", + "description": "The URL that specifies the behavior of this scalar.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ] + } + ] + } + } +} \ No newline at end of file diff --git a/justfile b/justfile new file mode 100644 index 00000000..0480bbe1 --- /dev/null +++ b/justfile @@ -0,0 +1,97 @@ +PACKAGE_NAME := "github.com/deepsourcelabs/cli" + +default: + @just --list + +# Build the CLI binary (CI alias) +build: build-local + +# Build the CLI binary to /tmp/deepsource +build-local: + cd cmd/deepsource && go build -o /tmp/deepsource . + +# Run all tests with coverage +test: + go test -v ./command/report/tests/... -count=1 + go test -v ./command/issues/tests/... -count=1 + go test -v ./command/metrics/tests/... -count=1 + go test -v ./command/vulnerabilities/tests/... -count=1 + go test -v ./command/analysis/tests/... -count=1 + go test -v ./command/repository/status/tests/... -count=1 + go test -v ./command/repository/analyzers/tests/... -count=1 + go test -v ./command/auth/status/tests/... -count=1 + echo "\n====TESTING DEEPSOURCE PACKAGE====\n" + go test -v ./deepsource/tests/... + echo "\n====CALCULATING TEST COVERAGE FOR ENTIRE PACKAGE====\n" + go test -v -coverprofile=coverage.out -count=1 ./... + +# Clone test fixtures and prepare test environment +test-setup: + mkdir -p $CODE_PATH + cd $CODE_PATH && ls -A1 | xargs rm -rf + git clone https://github.com/DeepSourceCorp/july $CODE_PATH + chmod +x /tmp/deepsource + cp ./command/report/tests/golden_files/python_coverage.xml /tmp + +# Remove build artifacts and coverage files +clean: + rm -rf /tmp/deepsource coverage.out dist/ + +# Bump patch version (x.y.Z) +bump-patch: + #!/usr/bin/env bash + set -euo pipefail + current=$(cat VERSION) + IFS='.' read -r major minor patch <<< "$current" + new="${major}.${minor}.$((patch + 1))" + echo "$new" > VERSION + git add VERSION + git commit -m "Bump version to ${new}" + echo "Bumped ${current} -> ${new}" + +# Bump minor version (x.Y.0) +bump-minor: + #!/usr/bin/env bash + set -euo pipefail + current=$(cat VERSION) + IFS='.' read -r major minor patch <<< "$current" + new="${major}.$((minor + 1)).0" + echo "$new" > VERSION + git add VERSION + git commit -m "Bump version to ${new}" + echo "Bumped ${current} -> ${new}" + +# Bump major version (X.0.0) +bump-major: + #!/usr/bin/env bash + set -euo pipefail + current=$(cat VERSION) + IFS='.' read -r major minor patch <<< "$current" + new="$((major + 1)).0.0" + echo "$new" > VERSION + git add VERSION + git commit -m "Bump version to ${new}" + echo "Bumped ${current} -> ${new}" + +# Tag and push a production release +deploy-prod: + #!/usr/bin/env bash + set -euo pipefail + version=$(cat VERSION) + tag="v${version}" + echo "Creating tag ${tag}..." 
+ git tag -a "${tag}" -m "Release ${tag}" + git push origin "${tag}" + echo "Pushed ${tag} — build-and-deploy workflow triggered" + +# Tag and push a dev release (version + commit hash) +deploy-dev: + #!/usr/bin/env bash + set -euo pipefail + version=$(cat VERSION) + hash=$(git rev-parse --short HEAD) + tag="v${version}-${hash}" + echo "Creating dev tag ${tag}..." + git tag -a "${tag}" -m "Dev release ${tag}" + git push origin "${tag}" + echo "Pushed ${tag} — dev build-and-deploy workflow triggered" diff --git a/schema.graphql b/schema.graphql new file mode 100644 index 00000000..8f92dabf --- /dev/null +++ b/schema.graphql @@ -0,0 +1,2676 @@ +"""An issue occurrence found during code analysis.""" +type AnalysisIssue { + """Unique identifier for this issue occurrence.""" + id: ID! + + """Source of this issue.""" + source: AnalysisIssueSource! + + """File path where the issue was found.""" + path: String! + + """Severity of the issue.""" + severity: IssueSeverity! + + """Category of the issue.""" + category: IssueCategory! + + """Description of the issue.""" + title: String + + """Detailed explanation of the issue.""" + explanation: String + + """Whether this issue is suppressed.""" + isSuppressed: Boolean! + + """Starting line number.""" + beginLine: Int! + + """Starting column number.""" + beginColumn: Int! + + """Ending line number.""" + endLine: Int! + + """Ending column number.""" + endColumn: Int! + + """Time when the issue was first detected.""" + createdAt: DateTime! + + """Time when the issue was last modified.""" + modifiedAt: DateTime! + + """Issue shortcode. Null for AI issues.""" + shortcode: String + + """The issue definition. Null for AI issues.""" + issue: Issue +} + +"""Source of an analysis issue.""" +enum AnalysisIssueSource { + STATIC + AI +} + +"""An enumeration.""" +enum IssueSeverity { + CRITICAL + MAJOR + MINOR +} + +"""An enumeration.""" +enum IssueCategory { + ANTI_PATTERN + BUG_RISK + PERFORMANCE + SECURITY + COVERAGE + TYPECHECK + STYLE + DOCUMENTATION + SECRETS +} + +""" +The `DateTime` scalar type represents a DateTime +value as specified by +[iso8601](https://en.wikipedia.org/wiki/ISO_8601). 
+""" +scalar DateTime + +type ComplianceReport implements Report { + """The key of the report.""" + key: ReportKey + + """The title of the report.""" + title: String + + """The current value of the report.""" + currentValue: Int + + """The status of the report.""" + status: ReportStatus + + """The historical values for this report.""" + historicalValues(startDate: Date!, endDate: Date!): [HistoricalValueItem] + + """The trends associated with this report.""" + trends: [Trend] + + """The compliance issue stats associated with this report.""" + complianceIssueStats: [ComplianceIssueStat] +} + +interface Report { + """The key of the report.""" + key: ReportKey + + """The title of the report.""" + title: String + + """The current value of the report.""" + currentValue: Int + + """The status of the report.""" + status: ReportStatus + + """The historical values for this report.""" + historicalValues(startDate: Date!, endDate: Date!): [HistoricalValueItem] + + """The trends associated with this report.""" + trends: [Trend] +} + +"""All possible report keys.""" +enum ReportKey { + OWASP_TOP_10 + SANS_TOP_25 + MISRA_C + CODE_COVERAGE + CODE_HEALTH_TREND + ISSUE_DISTRIBUTION + ISSUES_PREVENTED + ISSUES_AUTOFIXED +} + +"""The different statuses possible for a report.""" +enum ReportStatus { + PASSING + FAILING + NOOP +} + +type HistoricalValueItem { + """The date of the historical value.""" + date: Date + + """The values associated with this item.""" + values: [HistoricalValue] +} + +""" +The `Date` scalar type represents a Date +value as specified by +[iso8601](https://en.wikipedia.org/wiki/ISO_8601). +""" +scalar Date + +type HistoricalValue { + """The key associated with the value.""" + key: String + + """The value.""" + value: Int +} + +"""Represents a trend for a report.""" +type Trend { + """The label associated with the trend.""" + label: String + + """The value of the trend.""" + value: Int + + """The percentage change in the trend.""" + rate: Float @deprecated(reason: "Deprecated in favor of `changePercentage`.") + + """The percentage change in the trend.""" + changePercentage: Float +} + +type ComplianceIssueStat { + """The key for this stat.""" + key: String + + """The title for this stat.""" + title: String + + """The occurrence count of the compliance issue.""" + occurrence: ComplianceIssueOccurrenceCount +} + +type ComplianceIssueOccurrenceCount { + """The count of critical severity issues.""" + critical: Int + + """The count of major severity issues.""" + major: Int + + """The count of minor severity issues.""" + minor: Int + + """The total count of issues.""" + total: Int +} + +type InsightReport implements Report { + """The key of the report.""" + key: ReportKey + + """The title of the report.""" + title: String + + """The current value of the report.""" + currentValue: Int + + """The status of the report.""" + status: ReportStatus + + """The historical values for this report.""" + historicalValues(startDate: Date!, endDate: Date!): [HistoricalValueItem] + + """The trends associated with this report.""" + trends: [Trend] +} + +type Issue implements MaskPrimaryKeyNode { + shortcode: String! + title: String! + analyzer: Analyzer! + autofixAvailable: Boolean! + autofixAiAvailable: Boolean + isRecommended: Boolean! + + """The ID of the object""" + id: ID! + + """Category of the issue.""" + category: IssueCategory! + + """Severity of the issue.""" + severity: IssueSeverity! + + """The description of the issue in markdown.""" + description: String! 
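+
+  # Illustrative sketch (not part of the generated schema): fetching one issue
+  # definition through its analyzer. The shortcodes below are placeholder values.
+  #
+  #   query {
+  #     analyzer(shortcode: "python") {
+  #       issue(shortcode: "PYL-W0612") {
+  #         title
+  #         category
+  #         severity
+  #       }
+  #     }
+  #   }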
+ + """A short description of the issue.""" + shortDescription: String! + + """A list of tags associated with the issue.""" + tags: [String]! +} + +"""Custom node class to prevent leaking primary keys as integers""" +interface MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! +} + +"""A DeepSource Analyzer.""" +type Analyzer implements MaskPrimaryKeyNode { + """Version of the image used for this analyzer.""" + version: String! + + """Unique identifier for this analyzer globally.""" + shortcode: String! + + """Human-friendly name for this analyzer.""" + name: String! + + """Verbose description, written in Markdown.""" + description: String! + + """ + Schema of the meta fields accepted by the analyzer in .deepsource.toml. + """ + metaSchema: JSONString! + exampleConfig: String + + """The ID of the object""" + id: ID! + logo: String + numIssues: Int + issues(offset: Int, before: String, after: String, first: Int, last: Int): IssueConnection + + """Get a specific issue by its shortcode.""" + issue( + """Shortcode of the issue.""" + shortcode: String! + ): Issue + issueDistribution: [IssueDistributionItem] + type: AnalyzerType +} + +""" +Allows use of a JSON String for input / output from the GraphQL schema. + +Use of this type is *not recommended* as you lose the benefits of having a defined, static +schema (one of the key benefits of GraphQL). +""" +scalar JSONString + +type IssueConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [IssueEdge]! + totalCount: Int +} + +""" +The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. +""" +type PageInfo { + """When paginating forwards, are there more items?""" + hasNextPage: Boolean! + + """When paginating backwards, are there more items?""" + hasPreviousPage: Boolean! + + """When paginating backwards, the cursor to continue.""" + startCursor: String + + """When paginating forwards, the cursor to continue.""" + endCursor: String +} + +"""A Relay edge containing a `Issue` and its cursor.""" +type IssueEdge { + """The item at the end of the edge""" + node: Issue + + """A cursor for use in pagination""" + cursor: String! +} + +type IssueDistributionItem { + category: IssueCategory! + title: String! + count: Int! +} + +"""An enumeration.""" +enum AnalyzerType { + CORE + COMMUNITY + CUSTOM +} + +type IssueDistributionReport implements Report { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The status of the report.""" + status: ReportStatus @deprecated(reason: "Report doesn't have a status.") + + """The historical values for this report.""" + historicalValues(startDate: Date!, endDate: Date!): [HistoricalValueItem] @deprecated(reason: "Deprecated in favor of `values`.") + + """The trends associated with this report.""" + trends: [Trend]! + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + issueDistributionByAnalyzer: [IssueDistribution] + issueDistributionByCategory: [IssueDistribution] +} + +"""Represents the values recorded on a specific date.""" +type ReportValueItem { + """The date when the values were recorded.""" + date: Date! 
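+
+  # Illustrative sketch: reading issue-distribution report values over a date
+  # range. The repository name, login and dates are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       reports {
+  #         issueDistribution {
+  #           values(startDate: "2025-01-01", endDate: "2025-02-01") {
+  #             date
+  #             values { key value }
+  #           }
+  #         }
+  #       }
+  #     }
+  #   }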
+ + """The values recorded on the given date.""" + values: [ReportValue] +} + +"""Represents a value recorded for a report.""" +type ReportValue { + """The key associated with the value.""" + key: String! + + """The value.""" + value: Int! +} + +type IssueDistribution { + key: String + value: Int +} + +"""A metric tracked by an analyzer.""" +type Metric implements MaskPrimaryKeyNode & MetricDefinition { + """The ID of the object""" + id: ID! + + """The metric's name.""" + name: String! + + """The metric's unique identifier.""" + shortcode: MetricShortcode! + + """The metric's description in markdown format.""" + description: String! + + """Direction which can be considered positive for the metric.""" + positiveDirection: Direction! + + """Unit suffix to apply to the metric value.""" + unit: String + + """Lower bound for the metric value.""" + minValueAllowed: Int + + """Upper bound for the metric value.""" + maxValueAllowed: Int +} + +"""A metric's definition.""" +interface MetricDefinition { + """The metric's name.""" + name: String! + + """The metric's unique identifier.""" + shortcode: MetricShortcode! + + """The metric's description in markdown format.""" + description: String! + + """Direction which can be considered positive for the metric.""" + positiveDirection: Direction! + + """Unit suffix to apply to the metric value.""" + unit: String + + """Lower bound for the metric value.""" + minValueAllowed: Int + + """Upper bound for the metric value.""" + maxValueAllowed: Int +} + +"""Represents the various metric types.""" +enum MetricShortcode { + BCV + CCV + DCV + DDP + LCV + CPCV + NLCV + NBCV + NCCV + NCPCV +} + +"""Represents the direction of a value.""" +enum Direction { + UPWARD + DOWNWARD +} + +""" +Statistics pertaining to the changeset (of a commit or PR), as analyzed by an `AnalysisRun`. +""" +type ChangesetStats { + """Stats for number of lines in the changeset.""" + lines: ChangesetStatsCounts! + + """Stats for number of branches in the changeset.""" + branches: ChangesetStatsCounts! + + """Stats for number of conditions in the changeset.""" + conditions: ChangesetStatsCounts! +} + +""" +Overall and newly added number of lines (or branches or conditions) in a changeset. +""" +type ChangesetStatsCounts { + "\n Overall number of lines (or branches or conditions) across the repository.\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n " + overall: Int + + "\n Overall number of lines (or branches or conditions) that are covered across the repository.\",\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n " + overallCovered: Int + + "Newly added number of lines (or branches or conditions) in the changeset.\nNote: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n " + new: Int + + "\n Newly added number of lines (or branches or conditions) that are covered in the changeset.\n Note: `0` depicts no lines (or branches or conditions) were found whereas `None` depicts the information is not available.\n " + newCovered: Int +} + +"""A Pull Request on DeepSource.""" +type PullRequest implements MaskPrimaryKeyNode { + createdAt: DateTime! + baseBranch: String + branch: String + number: Int + title: String + + """The ID of the object""" + id: ID! + + """Current state of this pull request (open/closed)""" + state: PRState! 
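+
+  # Illustrative sketch: summarising a single pull request. The repository
+  # coordinates and PR number are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       pullRequest(number: 42) {
+  #         title
+  #         state
+  #         summary { issuesRaised issuesResolved vulnerabilitiesRaised }
+  #       }
+  #     }
+  #   }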
+ + """URL of the PR on the VCS provider's website""" + vcsUrl: String! + + """Summary of issues and vulnerabilities for this PR""" + summary: PRSummary! + + """Latest analysis run on this pull request""" + latestAnalysisRun: AnalysisRun + + """All issue occurrences from the latest analysis run on this PR""" + issueOccurrences(offset: Int, before: String, after: String, first: Int, last: Int, analyzerIn: [String]): OccurrenceConnection! + + """All vulnerability occurrences from the latest analysis run on this PR""" + vulnerabilityOccurrences(offset: Int, before: String, after: String, first: Int, last: Int): VulnerabilityOccurrenceConnection! + + """List of DeepSource metrics captured in the latest run""" + metrics: [RepositoryMetric!] +} + +"""State of a pull request.""" +enum PRState { + OPEN + CLOSED +} + +"""Summary counts for a pull request.""" +type PRSummary { + """Total issues raised by this pull request""" + issuesRaised: Int + + """Total issues resolved by this pull request""" + issuesResolved: Int + + """Total issues suppressed in this pull request""" + issuesSuppressed: Int + + """Total vulnerabilities raised by this pull request""" + vulnerabilitiesRaised: Int +} + +type AnalysisRun implements MaskPrimaryKeyNode { + createdAt: DateTime! + branchName: String + baseOid: String + commitOid: String + finishedAt: DateTime + repository: Repository! + + """The ID of the object""" + id: ID! + + """UID of this AnalysisRun.""" + runUid: UUID! + + """The current status of the run.""" + status: AnalysisRunStatus! + + """Summary of the analysis run""" + summary: AnalysisRunSummary! + + """Time when the analysis run was last modified""" + updatedAt: DateTime! + + """Analyzer checks in the analysis run.""" + checks(offset: Int, before: String, after: String, first: Int, last: Int, analyzerIn: [String]): CheckConnection + + """ + Statistics pertaining to the changeset (of a commit or PR) in the analysis run. + """ + changesetStats: ChangesetStats + + """Report Card for the Run with quality dimension grades and scores""" + reportCard: RunReport + + """SCA checks for OSS dependency analysis in this run.""" + scaChecks(offset: Int, before: String, after: String, first: Int, last: Int): SCACheckConnection +} + +type Repository implements MaskPrimaryKeyNode { + """The name of this repository.""" + name: String! + + """Object ID of the latest commit on the default branch.""" + latestCommitOid: String + isPrivate: Boolean! + isActivated: Boolean! + + """The ID of the object""" + id: ID! + + """The account under which this repository exists.""" + account: Account! + + """Past analysis runs for the repository""" + analysisRuns(offset: Int, before: String, after: String, first: Int, last: Int): AnalysisRunConnection + + """ + The `.deepsource.toml` config of the repository represented as a JSON object. + """ + configJson: JSON + + """The default base branch of the repository on DeepSource.""" + defaultBranch: String + + """The DSN for this repository.""" + dsn: String + + """Get all the analyzers enabled in this repository.""" + enabledAnalyzers(offset: Int, before: String, after: String, first: Int, last: Int): AnalyzerConnection + + """ + Get all issues raised in the default branch of this repository. Specifying a path would only return those issues whose occurrences are present in the file at path. 
+ """ + issues( + """Show issues for this path only.""" + path: String + offset: Int + before: String + after: String + first: Int + last: Int + tags: [String] + analyzerIn: [String] + ): RepositoryIssueConnection + + """All issue occurrences in the default branch.""" + issueOccurrences(offset: Int, before: String, after: String, first: Int, last: Int, analyzerIn: [String]): OccurrenceConnection + + """List of dependency vulnerability occurrences in the default branch.""" + dependencyVulnerabilityOccurrences(offset: Int, before: String, after: String, first: Int, last: Int): VulnerabilityOccurrenceConnection + + """Get a dependency vulnerability occurrence by its ID.""" + dependencyVulnerabilityOccurrence(id: ID!): VulnerabilityOccurrence! + + """Get a specific pull request by its number on the VCS provider""" + pullRequest( + """Pull request number on the VCS provider""" + number: Int! + ): PullRequest + + """Get a specific repository target.""" + target(id: ID!): RepositoryTarget! + + """List of repository targets for this repository.""" + targets(offset: Int, before: String, after: String, first: Int, last: Int): RepositoryTargetConnection! + + """Get a report associated with this repository""" + report( + """Get the report associated with the report key""" + key: ReportKey! + ): Report! @deprecated(reason: "Deprecated in favor of `reports`.") + + """Namespace containing all available reports.""" + reports: RepositoryReportsNamespace! + + """VCS Provider of the repository.""" + vcsProvider: VCSProvider! + + """URL of the repository on the VCS.""" + vcsUrl: String! + + """List of all DeepSource metrics.""" + metrics( + """List of metric shortcodes to filter on.""" + shortcodeIn: [MetricShortcode] + ): [RepositoryMetric!]! + + """List of `IgnoreRule`s that exist for the repository.""" + ignoreRules(offset: Int, before: String, after: String, first: Int, last: Int, issueShortcode: String, filePath: String): IgnoreRuleConnection + + """Issue categories configuration for the repository.""" + issueCategorySettings: [IssueCategorySetting!]! + + """Issue priority configuration for the repository.""" + issuePrioritySettings: [IssuePrioritySetting!]! + + """Metric settings for the repository.""" + metricSettings: [MetricSetting!]! + + """ + Whether the account has allowed Autofix AI to run on private repositories. + """ + allowAutofixAi: Boolean! + + """Whether to use the legacy autofix engine.""" + useLegacyAutofix: Boolean! +} + +type Account implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """The unique identifier (or username) of the account.""" + login: String! + + """The account type (individual or team).""" + type: AccountType! + + """VCS Provider of the account.""" + vcsProvider: VCSProvider! + + """Whether the account is a beta tester""" + isBetaTester: Boolean! + + """URL for the account's public avatar.""" + avatarUrl: String + + """Get a report associated with this account""" + report( + """Get the report associated with the report key""" + key: ReportKey! + ): Report! @deprecated(reason: "Deprecated in favor of `reports`.") + + """Namespace containing all available reports.""" + reports: AccountReportsNamespace! + + """URL for the account on the VCS Provider.""" + vcsUrl: String + + """ + Get all repositories accessible to the current user under the given account. + """ + repositories(offset: Int, before: String, after: String, first: Int, last: Int): RepositoryConnection! + + """Members of the team. 
This is an empty list for an individual account.""" + members(offset: Int, before: String, after: String, first: Int, last: Int): TeamMemberConnection! + + """Subscription and billing details of the account.""" + subscription: AccountSubscription! + + """Suppressed issues on the account/team.""" + suppressedIssues(offset: Int, before: String, after: String, first: Int, last: Int, issueShortcode: String): TeamSuppressedIssueConnection! +} + +enum AccountType { + """A individual account.""" + INDIVIDUAL + + """A team account.""" + TEAM +} + +"""An enumeration.""" +enum VCSProvider { + GITHUB + GITLAB + BITBUCKET + BITBUCKET_DATACENTER + GITHUB_ENTERPRISE + GSR + ADS +} + +"""Namespace containing all the reports available for an `Account`""" +type AccountReportsNamespace { + owaspTop10: OwaspTop10Report! + sansTop25: SansTop25Report! + misraC: MisraCReport! + codeCoverage: CodeCoverageReport! + codeHealthTrend: CodeHealthTrendReport! + issueDistribution: IssueDistributionReport! + issuesPrevented: IssuesPreventedReport! + issuesAutofixed: IssuesAutofixedReport! +} + +"""The OWASP Top 10 report.""" +type OwaspTop10Report { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! + + """The status of the report.""" + status: ReportStatus! + + """The compliance issue stats associated with this report.""" + securityIssueStats: [SecurityIssueStat]! +} + +type SecurityIssueStat { + """The key for this stat.""" + key: String! + + """The title for this stat.""" + title: String! + + """The severity distribution for this stat.""" + occurrence: SeverityDistribution! +} + +"""Distribution of severity count.""" +type SeverityDistribution { + """The count of critical severity issues.""" + critical: Int + + """The count of major severity issues.""" + major: Int + + """The count of minor severity issues.""" + minor: Int + + """The total count of issues.""" + total: Int +} + +"""The SANS Top 25 report.""" +type SansTop25Report { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! + + """The status of the report.""" + status: ReportStatus! + + """The compliance issue stats associated with this report.""" + securityIssueStats: [SecurityIssueStat]! +} + +"""The MISRA-C report.""" +type MisraCReport { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! + + """The status of the report.""" + status: ReportStatus! 
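+
+  # Illustrative sketch: reading a compliance report for an account. The login
+  # is a placeholder value.
+  #
+  #   query {
+  #     account(login: "example-org", vcsProvider: GITHUB) {
+  #       reports {
+  #         misraC {
+  #           status
+  #           currentValue
+  #           securityIssueStats {
+  #             title
+  #             occurrence { critical major minor total }
+  #           }
+  #         }
+  #       }
+  #     }
+  #   }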
+ + """The compliance issue stats associated with this report.""" + securityIssueStats: [SecurityIssueStat]! +} + +"""The Code Coverage report.""" +type CodeCoverageReport { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The list of repositories.""" + repositories( + """The query string to search for repositories.""" + q: String + + """The sort key to sort the repositories results by.""" + sortKey: CodeCoverageReportRepositorySortKey + offset: Int + before: String + after: String + first: Int + last: Int + ): CodeCoverageReportRepositoryConnection +} + +type CodeCoverageReportRepositoryConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [CodeCoverageReportRepositoryEdge]! + totalCount: Int +} + +""" +A Relay edge containing a `CodeCoverageReportRepository` and its cursor. +""" +type CodeCoverageReportRepositoryEdge { + """The item at the end of the edge""" + node: CodeCoverageReportRepository + + """A cursor for use in pagination""" + cursor: String! +} + +"""Representation of a `Repository` in the Code Coverage report.""" +type CodeCoverageReportRepository implements MaskPrimaryKeyNode { + """The name of this repository.""" + name: String! + + """The ID of the object""" + id: ID! + + """The LCV metric value for this repository.""" + lcvMetricValue: Float + + """The BCV metric value for this repository.""" + bcvMetricValue: Float + + """Whether the LCV value is passing.""" + isLcvPassing: Boolean + + """Whether the BCV value is passing.""" + isBcvPassing: Boolean +} + +""" +Possible options to sort the list of repositories in the Code Coverage report. +""" +enum CodeCoverageReportRepositorySortKey { + LCV_ASCENDING + LCV_DESCENDING + BCV_ASCENDING + BCV_DESCENDING +} + +"""The Code Health Trend report.""" +type CodeHealthTrendReport { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! +} + +"""The Issues Prevented report.""" +type IssuesPreventedReport { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! +} + +"""The Issues Autofixed report.""" +type IssuesAutofixedReport { + """The key of the report.""" + key: ReportKey! + + """The title of the report.""" + title: String! + + """The current value of the report.""" + currentValue: Int + + """The report values for this report.""" + values( + """The start date to get the report values.""" + startDate: Date! + + """The start date to get the report values.""" + endDate: Date! + ): [ReportValueItem]! + + """The trends associated with this report.""" + trends: [Trend]! +} + +type RepositoryConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! 
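+
+  # Illustrative sketch: paginating over an account's repositories. The login
+  # is a placeholder value.
+  #
+  #   query {
+  #     account(login: "example-org", vcsProvider: GITHUB) {
+  #       repositories(first: 10) {
+  #         totalCount
+  #         edges { node { name isPrivate isActivated } }
+  #       }
+  #     }
+  #   }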
+ + """Contains the nodes in this connection.""" + edges: [RepositoryEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Repository` and its cursor.""" +type RepositoryEdge { + """The item at the end of the edge""" + node: Repository + + """A cursor for use in pagination""" + cursor: String! +} + +type TeamMemberConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [TeamMemberEdge]! + totalCount: Int +} + +"""A Relay edge containing a `TeamMember` and its cursor.""" +type TeamMemberEdge { + """The item at the end of the edge""" + node: TeamMember + + """A cursor for use in pagination""" + cursor: String! +} + +"""Represents a user within a team.""" +type TeamMember implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """The User instance.""" + user: User! + + """The role this user has in the team.""" + role: TeamMemberRole! + + """Whether this user is the owner of the team.""" + isOwner: Boolean! + + """The time when this user joined the team.""" + joinedAt: DateTime! +} + +type User implements MaskPrimaryKeyNode { + firstName: String! + lastName: String! + email: String! + + """The ID of the object""" + id: ID! + + """ + All the accounts associated with the user. This includes the team accounts the user is part of and the individual accounts they have added on DeepSource. + """ + accounts(offset: Int, before: String, after: String, first: Int, last: Int): AccountConnection + + """The anonymous ID used for analytics and tracking.""" + analyticsId: String! + + """Whether the user is a beta tester.""" + isBetaTester: Boolean! + + """The IDE subscription associated with the user.""" + ideSubscription: IDESubscription +} + +type AccountConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [AccountEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Account` and its cursor.""" +type AccountEdge { + """The item at the end of the edge""" + node: Account + + """A cursor for use in pagination""" + cursor: String! +} + +"""Subscription details of an DeepSource IDE subscription.""" +type IDESubscription { + """The plan of this subscription.""" + plan: IDESubscriptionPlan! +} + +"""Represents DeepSource's IDE subscription plans.""" +enum IDESubscriptionPlan { + FREE + PRO + SPONSORED +} + +"""An enumeration.""" +enum TeamMemberRole { + ADMIN + MEMBER + CONTRIBUTOR +} + +"""Subscription and billing details of an `Account`.""" +type AccountSubscription { + """The plan associated with this account's subscription.""" + plan: AccountSubscriptionPlan! +} + +"""Represents DeepSource's subscription plans.""" +enum AccountSubscriptionPlan { + FREE + STARTER + BUSINESS + ENTERPRISE +} + +type TeamSuppressedIssueConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [TeamSuppressedIssueEdge]! + totalCount: Int +} + +"""A Relay edge containing a `TeamSuppressedIssue` and its cursor.""" +type TeamSuppressedIssueEdge { + """The item at the end of the edge""" + node: TeamSuppressedIssue + + """A cursor for use in pagination""" + cursor: String! +} + +""" +A `TeamSuppressedIssue` represents an issue from an analyzer that has been suppressed on the team level. This is +a global suppression that affects all repositories in the team. +""" +type TeamSuppressedIssue implements MaskPrimaryKeyNode { + createdAt: DateTime! 
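+
+  # Illustrative sketch: listing issues suppressed at the team level. The login
+  # is a placeholder value.
+  #
+  #   query {
+  #     account(login: "example-org", vcsProvider: GITHUB) {
+  #       suppressedIssues(first: 20) {
+  #         edges {
+  #           node {
+  #             createdAt
+  #             issue { shortcode title }
+  #             user { email }
+  #           }
+  #         }
+  #       }
+  #     }
+  #   }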
+ + """The ID of the object""" + id: ID! + + """The `Issue` that is suppressed.""" + issue: Issue! + + """The user who suppressed the issue.""" + user: User +} + +type AnalysisRunConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [AnalysisRunEdge]! + totalCount: Int +} + +"""A Relay edge containing a `AnalysisRun` and its cursor.""" +type AnalysisRunEdge { + """The item at the end of the edge""" + node: AnalysisRun + + """A cursor for use in pagination""" + cursor: String! +} + +"""A JSON object.""" +scalar JSON + +type AnalyzerConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [AnalyzerEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Analyzer` and its cursor.""" +type AnalyzerEdge { + """The item at the end of the edge""" + node: Analyzer + + """A cursor for use in pagination""" + cursor: String! +} + +type RepositoryIssueConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [RepositoryIssueEdge]! + totalCount: Int +} + +"""A Relay edge containing a `RepositoryIssue` and its cursor.""" +type RepositoryIssueEdge { + """The item at the end of the edge""" + node: RepositoryIssue + + """A cursor for use in pagination""" + cursor: String! +} + +type RepositoryIssue implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """Definition of the issue that has been raised.""" + issue: Issue! + + """All occurrences of this issue in the default branch.""" + occurrences(offset: Int, before: String, after: String, first: Int, last: Int, analyzerIn: [String]): OccurrenceConnection + + """The repository for which this issue has been raised.""" + repository: Repository! +} + +type OccurrenceConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [OccurrenceEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Occurrence` and its cursor.""" +type OccurrenceEdge { + """The item at the end of the edge""" + node: Occurrence + + """A cursor for use in pagination""" + cursor: String! +} + +type Occurrence implements MaskPrimaryKeyNode { + path: String! + beginLine: Int! + beginColumn: Int! + endLine: Int! + endColumn: Int! + + """The ID of the object""" + id: ID! + + """The definition of the issue which has been raised.""" + issue: Issue! + + """Title describing the issue which has been raised here.""" + title: String! +} + +type VulnerabilityOccurrenceConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [VulnerabilityOccurrenceEdge]! + totalCount: Int +} + +"""A Relay edge containing a `VulnerabilityOccurrence` and its cursor.""" +type VulnerabilityOccurrenceEdge { + """The item at the end of the edge""" + node: VulnerabilityOccurrence + + """A cursor for use in pagination""" + cursor: String! +} + +type VulnerabilityOccurrence implements MaskPrimaryKeyNode { + """The reachability of the vulnerability occurrence.""" + reachability: VulnerabilityOccurrenceReachability! + + """The fixability of the vulnerability occurrence.""" + fixability: VulnerabilityOccurrenceFixability! + + """The vulnerability.""" + vulnerability: Vulnerability! + + """The ID of the object""" + id: ID! 
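+
+  # Illustrative sketch: listing dependency vulnerabilities on the default
+  # branch. The repository coordinates are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       dependencyVulnerabilityOccurrences(first: 10) {
+  #         edges {
+  #           node {
+  #             reachability
+  #             fixability
+  #             package { name ecosystem }
+  #             packageVersion { version }
+  #             vulnerability { identifier severity fixedVersions }
+  #           }
+  #         }
+  #       }
+  #     }
+  #   }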
+ + """The package associated with the vulnerability occurrence.""" + package: Package! + + """The package version associated with the vulnerability occurrence.""" + packageVersion: PackageVersion! +} + +"\n The reachability type of the vulnerability occurrence\n " +enum VulnerabilityOccurrenceReachability { + REACHABLE + UNREACHABLE + UNKNOWN +} + +"\n The fixability type of the vulnerability occurrence\n " +enum VulnerabilityOccurrenceFixability { + ERROR + UNFIXABLE + GENERATING_FIX + POSSIBLY_FIXABLE + MANUALLY_FIXABLE + AUTO_FIXABLE +} + +type Vulnerability implements MaskPrimaryKeyNode { + """CVSS v2 vector""" + cvssV2Vector: String + + """CVSS v2 base score""" + cvssV2BaseScore: Float + + """Severity based on the CVSSv2 base score.""" + cvssV2Severity: VulnerabilitySeverity + + """CVSS v3 vector""" + cvssV3Vector: String + + """CVSS v3 base score""" + cvssV3BaseScore: Float + + """Severity based on the CVSSv3 base score.""" + cvssV3Severity: VulnerabilitySeverity + + """CVSS v4 vector""" + cvssV4Vector: String + + """CVSS v4 base score""" + cvssV4BaseScore: Float + + """Severity based on the CVSSv4 base score.""" + cvssV4Severity: VulnerabilitySeverity + + """Overall implied severity.""" + severity: VulnerabilitySeverity + + """The identifier of the vulnerability""" + identifier: String! + + """The aliases of the vulnerability""" + aliases: [String!]! + + """The summary of the vulnerability""" + summary: String + + """The details of the vulnerability""" + details: String + + """The time when the vulnerability was published""" + publishedAt: DateTime! + + """The time when the vulnerability was updated""" + updatedAt: DateTime! + + """The time when the vulnerability was withdrawn""" + withdrawnAt: DateTime + + """The EPSS score of the vulnerability""" + epssScore: Float + + """The EPSS percentile of the vulnerability""" + epssPercentile: Float + + """Introduced versions.""" + introducedVersions: [String]! + + """Fixed versions.""" + fixedVersions: [String]! + + """The ID of the object""" + id: ID! + + """Reference URLs.""" + referenceUrls: [String]! +} + +"""The severity of the vulnerability.""" +enum VulnerabilitySeverity { + NONE + LOW + MEDIUM + HIGH + CRITICAL +} + +type Package implements MaskPrimaryKeyNode { + """The ecosystem of the package.""" + ecosystem: Ecosystem! + + """Name of the package""" + name: String! + + """The package URL""" + purl: String + + """The ID of the object""" + id: ID! +} + +enum Ecosystem { + NPM + PYPI + MAVEN + GO + RUBYGEMS + NUGET + PACKAGIST + CRATES_IO +} + +type PackageVersion implements MaskPrimaryKeyNode { + """Version of the package""" + version: String! + + """The type of the package version.""" + versionType: PackageVersionType + + """The ID of the object""" + id: ID! +} + +"""The type of the package version.""" +enum PackageVersionType { + SEMVER + ECOSYSTEM + GIT +} + +"""A SCA target in a repository.""" +type RepositoryTarget implements MaskPrimaryKeyNode { + """The ecosystem of the target.""" + ecosystem: Ecosystem! + + """The package manager of the target.""" + packageManager: PackageManager! + manifestPath: String + lockfilePath: String! + + """The source of the target.""" + source: RepositoryTargetSource! + isActivated: Boolean! + + """The ID of the object""" + id: ID! 
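+
+  # Illustrative sketch: listing SCA targets detected in a repository. The
+  # repository coordinates are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       targets(first: 10) {
+  #         edges {
+  #           node { ecosystem packageManager manifestPath lockfilePath source isActivated }
+  #         }
+  #       }
+  #     }
+  #   }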
+} + +"\n Package managers supported by DeepSource\n " +enum PackageManager { + REQUIREMENTS_TXT + POETRY + PIPFILE + PDM + UV + NPM + YARN + PNPM + BUN + GRADLE + MAVEN + GO_MOD + RUBY_GEMS + NUGET + PACKAGIST + CARGO +} + +"\n The source of the target.\n " +enum RepositoryTargetSource { + AUTO + CUSTOM +} + +type RepositoryTargetConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [RepositoryTargetEdge]! + totalCount: Int +} + +"""A Relay edge containing a `RepositoryTarget` and its cursor.""" +type RepositoryTargetEdge { + """The item at the end of the edge""" + node: RepositoryTarget + + """A cursor for use in pagination""" + cursor: String! +} + +"""Namespace containing all the reports available for an `Repository`""" +type RepositoryReportsNamespace { + owaspTop10: OwaspTop10Report! + sansTop25: SansTop25Report! + misraC: MisraCReport! + codeHealthTrend: CodeHealthTrendReport! + issueDistribution: IssueDistributionReport! + issuesPrevented: IssuesPreventedReport! + issuesAutofixed: IssuesAutofixedReport! +} + +"""A Metric's manifestation specific to a repository.""" +type RepositoryMetric implements MetricDefinition { + """The metric's name.""" + name: String! + + """The metric's unique identifier.""" + shortcode: MetricShortcode! + + """The metric's description in markdown format.""" + description: String! + + """Direction which can be considered positive for the metric.""" + positiveDirection: Direction! + + """Unit suffix to apply to the metric value.""" + unit: String + + """Lower bound for the metric value.""" + minValueAllowed: Int + + """Upper bound for the metric value.""" + maxValueAllowed: Int + + """Whether this metric is enabled for reporting in the repository.""" + isReported: Boolean! + + """ + Whether to fail checks when thresholds are not met for the metric in the repository. + """ + isThresholdEnforced: Boolean! + + """Items in the repository metric.""" + items: [RepositoryMetricItem!]! +} + +"""An item in the `RepositoryMetric`.""" +type RepositoryMetricItem implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """Distinct key representing the metric in the repository.""" + key: MetricKey! + + """ + Threshold value for the metric, customizable by the user. Null if no threshold is set. + """ + threshold: Int + + """ + Latest value captured for this metric on the repository's default branch. + """ + latestValue: Float + + """ + Latest value captured for this metric on the repository's default branch. Suffixed with the unit and returned as a human-readable string. + """ + latestValueDisplay: String + + """ + The status of the threshold condition for the latest metric value on the repository's default branch. + """ + thresholdStatus: MetricThresholdStatus + + """ + All values captured for this metric in the repository's default branch. + """ + values(offset: Int, before: String, after: String, first: Int, last: Int, commitOidIn: [String]): MetricValueConnection +} + +"""Represents the key for which the metric is recorded in a repository.""" +enum MetricKey { + AGGREGATE + C_AND_CPP + CSHARP + GO + JAVA + JAVASCRIPT + PHP + PYTHON + RUBY + RUST + SCALA + KOTLIN + SWIFT +} + +""" +Represents the status of the threshold condition for a particular metric value. +""" +enum MetricThresholdStatus { + PASSING + FAILING +} + +type MetricValueConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! 
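+
+  # Illustrative sketch: reading coverage metrics and thresholds for a
+  # repository. The repository coordinates are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       metrics(shortcodeIn: [LCV, BCV]) {
+  #         shortcode
+  #         items { key threshold latestValueDisplay thresholdStatus }
+  #       }
+  #     }
+  #   }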
+ + """Contains the nodes in this connection.""" + edges: [MetricValueEdge]! + totalCount: Int +} + +"""A Relay edge containing a `MetricValue` and its cursor.""" +type MetricValueEdge { + """The item at the end of the edge""" + node: MetricValue + + """A cursor for use in pagination""" + cursor: String! +} + +"""An individual value captured for a RepositoryMetric.""" +type MetricValue implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """Metric value reported by the analyzer.""" + value: Float! + + """Value suffixed with the unit of the metric.""" + valueDisplay: String! + + """ + Threshold value for the metric when this value was reported. Null if no threshold was set. + """ + threshold: Int + + """The status of the threshold condition for the metric value.""" + thresholdStatus: MetricThresholdStatus + + """Commit SHA for which this value was recorded on the repository.""" + commitOid: String! + + """The time at which the value was captured.""" + createdAt: DateTime! +} + +type IgnoreRuleConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [IgnoreRuleEdge]! + totalCount: Int +} + +"""A Relay edge containing a `IgnoreRule` and its cursor.""" +type IgnoreRuleEdge { + """The item at the end of the edge""" + node: IgnoreRule + + """A cursor for use in pagination""" + cursor: String! +} + +""" +An `IgnoreRule` defines the condition on which to suppress an `Issue`'s `Occurrence`s in a `Repository`. +""" +type IgnoreRule implements MaskPrimaryKeyNode { + createdAt: DateTime! + + """The ID of the object""" + id: ID! + + """Ignore level of the rule.""" + level: IgnoreRuleLevel! + + """Ignore type of the rule.""" + type: IgnoreRuleType! + + """The `Issue` to ignore in the rule.""" + issue: Issue! + + """File path if rule is on `FILE` level.""" + filePath: String + + """Glob pattern if rule is of `PATTERN` type.""" + globPattern: String +} + +""" +Represents the level of an `IgnoreRule`. +- `REPOSITORY`: suppress the issue for all files in the repository. +- `FILE`: suppress the issue for the given file path in the repository. +""" +enum IgnoreRuleLevel { + REPOSITORY + FILE +} + +""" +Represents the type of an `IgnoreRule`. +- `FOREVER`: suppress the issue in the repository always. +- `PATTERN`: suppress the issue occurrences matching the given glob pattern in the repository. +- `TEST_PATTERN`: suppress the issue occurrences matching the repository's specified test patterns in the repository. +""" +enum IgnoreRuleType { + FOREVER + PATTERN + TEST_PATTERN +} + +""" +Configuration for an `IssueCategory` in a `Repository`. Also known as Quality Gates. +""" +type IssueCategorySetting { + """An `IssueCategory`.""" + category: IssueCategory! + + """ + Whether issues of given category are enabled for reporting in the repository. + """ + isReported: Boolean! + + """ + Whether to fail checks when occurrence(s) of issues of given category are found in the repository. + """ + canFailCheck: Boolean! +} + +"""Configuration for an `IssuePriorityType` in a `Repository`.""" +type IssuePrioritySetting { + """A `IssuePriority`.""" + priorityType: IssuePriorityType! + + """ + Whether issues of given priority are enabled for reporting in the repository. + """ + isReported: Boolean! + + """ + Whether to fail checks when occurrence(s) of issues of given priority are found in the repository. + """ + canFailCheck: Boolean! 
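+
+  # Illustrative sketch: inspecting a repository's quality-gate configuration.
+  # The repository coordinates are placeholder values.
+  #
+  #   query {
+  #     repository(name: "example-repo", login: "example-org", vcsProvider: GITHUB) {
+  #       issueCategorySettings { category isReported canFailCheck }
+  #       issuePrioritySettings { priorityType isReported canFailCheck }
+  #       metricSettings { metricShortcode isReported isThresholdEnforced }
+  #     }
+  #   }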
+} + +"""Enum for issue priority type.""" +enum IssuePriorityType { + LOW + MEDIUM + HIGH +} + +""" +Configuration for a `Metric` in a `Repository`. Also known as Quality Gates. +""" +type MetricSetting { + """The metric's unique identifier.""" + metricShortcode: MetricShortcode! + + """Whether the metric is enabled for reporting in the repository.""" + isReported: Boolean! + + """ + Whether to fail checks when the metric's thresholds are not met in the repository. + """ + isThresholdEnforced: Boolean! +} + +""" +Leverages the internal Python implementation of UUID (uuid.UUID) to provide native UUID objects +in fields, resolvers and input. +""" +scalar UUID + +"""An enumeration.""" +enum AnalysisRunStatus { + PENDING + SUCCESS + FAILURE + TIMEOUT + CANCEL + READY + SKIPPED +} + +type AnalysisRunSummary { + """Number of issues introduced during this analysis run""" + occurrencesIntroduced: Int + + """Number of issues marked as resolved in this analysis run""" + occurrencesResolved: Int + + """Number of issues marked as suppressed in this analysis run""" + occurrencesSuppressed: Int + + """Number of vulnerabilities introduced in this run""" + vulnerabilitiesIntroduced: Int + occurrenceDistributionByAnalyzer: [OccurrenceDistributionByAnalyzer] + occurrenceDistributionByCategory: [OccurrenceDistributionByCategory] +} + +type OccurrenceDistributionByAnalyzer { + """Shortcode of the analyzer""" + analyzerShortcode: String! + + """Number of issues detected by the analyzer""" + introduced: Int! +} + +type OccurrenceDistributionByCategory { + """Category of the issue""" + category: IssueCategory! + + """Number of issues detected that belong to this category""" + introduced: Int! +} + +type CheckConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [CheckEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Check` and its cursor.""" +type CheckEdge { + """The item at the end of the edge""" + node: Check + + """A cursor for use in pagination""" + cursor: String! +} + +"""A single analyzer check as part of an analysis run.""" +type Check implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """Sequence number of the check in the analysis run it belongs to.""" + sequence: Int! + + """The current status of the check.""" + status: CheckStatus! + + """The analyzer related to the check.""" + analyzer: Analyzer! + + """Time when the check was created.""" + createdAt: DateTime! + + """Time when the check was last modified.""" + updatedAt: DateTime! + + """Time when the check finished.""" + finishedAt: DateTime + + """Summary of the check.""" + summary: CheckSummary! + + """Issue occurrences found in the check.""" + occurrences(offset: Int, before: String, after: String, first: Int, last: Int, analyzerIn: [String]): OccurrenceConnection! @deprecated(reason: "Use 'issues' field instead. The 'issues' field returns both static analysis and AI review issues with filtering support.") + + """List of DeepSource metrics captured in the check.""" + metrics: [RepositoryMetric!] + + """All issues found in this check.""" + issues( + source: AnalysisIssueSource + category: IssueCategory + severity: IssueSeverity + + """Filter by shortcode. 
Only applies to STATIC issues.""" + q: String + before: String + after: String + first: Int + last: Int + ): AnalysisIssueConnection +} + +"""An enumeration.""" +enum CheckStatus { + WAITING + PENDING + SUCCESS + FAILURE + TIMEOUT + CANCEL + READY + NEUTRAL + ARTIFACT_TIMEOUT + SKIPPED +} + +"""Summary of a check.""" +type CheckSummary { + """Number of issues introduced in the check.""" + occurrencesIntroduced: Int + + """Number of issues resolved in the check.""" + occurrencesResolved: Int + + """Number of issues marked as suppressed in the check.""" + occurrencesSuppressed: Int + + """The issue category distribution for the check.""" + occurrenceDistributionByCategory: [OccurrenceDistributionByCategory] +} + +"""Connection for paginated analysis issues.""" +type AnalysisIssueConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [AnalysisIssueEdge]! + + """Total number of issues matching the filter.""" + totalCount: Int +} + +"""A Relay edge containing a `AnalysisIssue` and its cursor.""" +type AnalysisIssueEdge { + """The item at the end of the edge""" + node: AnalysisIssue + + """A cursor for use in pagination""" + cursor: String! +} + +"""Code quality report card for an analysis run.""" +type RunReport implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + createdAt: DateTime! + modifiedAt: DateTime! + + """Current status of the report.""" + status: RunReportStatus! + + """Security dimension score.""" + security: RunReportDimension + + """Reliability dimension score.""" + reliability: RunReportDimension + + """Complexity dimension score.""" + complexity: RunReportDimension + + """Code hygiene dimension score.""" + hygiene: RunReportDimension + + """Test coverage metrics.""" + coverage: RunReportCoverage + + """Aggregate score across all dimensions.""" + aggregate: RunReportAggregate + + """Analysis run metadata.""" + meta: RunReportMetadata + + """Suggested focus area for improvement.""" + focusArea: RunReportFocusArea +} + +"""Status of the Run Report""" +enum RunReportStatus { + IN_PROGRESS + RECOMPUTE + COMPLETED + PENDING +} + +"""A code quality dimension score (e.g., security, reliability).""" +type RunReportDimension { + """Letter grade (A, B, C, or D).""" + grade: RunReportGrade! + + """Numeric score (0-100).""" + score: Int! + + """Number of issues in this dimension.""" + issuesCount: Int! + + """Human-readable summary of the dimension.""" + summary: String +} + +"""Letter grade for a report dimension""" +enum RunReportGrade { + A + B + C + D +} + +"""Test coverage metrics for an analysis run.""" +type RunReportCoverage { + """Letter grade (A, B, C, or D).""" + grade: RunReportGrade + + """Numeric coverage score (0-100).""" + score: Int + + """Percentage of lines covered by tests.""" + lineCoverage: Float + + """Percentage of branches covered by tests.""" + branchCoverage: Float +} + +"""Aggregate code quality score across all dimensions.""" +type RunReportAggregate { + """Overall letter grade (A, B, C, or D).""" + grade: RunReportGrade! + + """Overall numeric score (0-100).""" + score: Int! +} + +"""Metadata about the analysis run scope and limits.""" +type RunReportMetadata { + """Number of lines analyzed.""" + linesChanged: Int! + + """Number of files analyzed.""" + filesChanged: Int! + + """ + Whether the aggregate grade was capped due to critical or security issues. + """ + capped: Boolean! 
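+
+  # Illustrative sketch: fetching the report card for an analysis run. The
+  # commit OID is a placeholder value.
+  #
+  #   query {
+  #     run(commitOid: "0123abcd") {
+  #       status
+  #       reportCard {
+  #         aggregate { grade score }
+  #         security { grade score issuesCount }
+  #         coverage { lineCoverage branchCoverage }
+  #         meta { linesChanged filesChanged capped }
+  #       }
+  #     }
+  #   }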
+ + """Reason for grade capping.""" + capReason: String +} + +"""Suggested area to focus on for code quality improvement.""" +type RunReportFocusArea { + """The dimension to focus on improving.""" + dimension: String + + """Recommended action to take.""" + action: String +} + +type SCACheckConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [SCACheckEdge]! + totalCount: Int +} + +"""A Relay edge containing a `SCACheck` and its cursor.""" +type SCACheckEdge { + """The item at the end of the edge""" + node: SCACheck + + """A cursor for use in pagination""" + cursor: String! +} + +""" +A single SCA (Software Composition Analysis) check as part of an analysis run. +""" +type SCACheck implements MaskPrimaryKeyNode { + """The ID of the object""" + id: ID! + + """Sequence number of the SCA check in the analysis run it belongs to.""" + sequence: Int! + + """The current status of the SCA check.""" + status: SCACheckStatus! + + """The repository target for which this SCA check was performed.""" + target: RepositoryTarget + + """Time when the SCA check was created.""" + createdAt: DateTime! + + """Time when the SCA check was last modified.""" + updatedAt: DateTime! + + """Time when the SCA check finished.""" + finishedAt: DateTime + + """Summary of the SCA check.""" + summary: SCACheckSummary! + + """Vulnerability occurrences found in the SCA check.""" + vulnerabilityOccurrences(offset: Int, before: String, after: String, first: Int, last: Int): VulnerabilityOccurrenceConnection! +} + +"""An enumeration.""" +enum SCACheckStatus { + WAITING + PENDING + SUCCESS + FAILURE + TIMEOUT + CANCEL + READY + NEUTRAL + ARTIFACT_TIMEOUT + SKIPPED +} + +"""Summary of an SCA check.""" +type SCACheckSummary { + """Number of vulnerabilities introduced in the check.""" + vulnerabilitiesIntroduced: Int +} + +type Query { + """The currently authenticated user.""" + viewer: User + + """Lookup a transformer by its shortcode.""" + transformer( + """Shortcode of the transformer.""" + shortcode: String! + ): Transformer @deprecated(reason: "Use `codeFormatter` instead") + + """List all transformers with optional filtering.""" + transformers(offset: Int, before: String, after: String, first: Int, last: Int, name_Icontains: String): TransformerConnection @deprecated(reason: "Use `codeFormatters` instead") + + """Lookup a repository on DeepSource using it's name and VCS provider.""" + repository( + """The name of the repository to lookup.""" + name: String! + + """ + The login or username of the account under which the repository exists. + """ + login: String! + + """VCS Provider of the repository.""" + vcsProvider: VCSProvider! + ): Repository + + """The DeepSource installation.""" + installation: Installation + + """Lookup a code formatter by its shortcode.""" + codeFormatter( + """Shortcode of the code formatter.""" + shortcode: String! + ): CodeFormatter + + """List all code formatters with optional filtering.""" + codeFormatters(offset: Int, before: String, after: String, first: Int, last: Int, name_Icontains: String): CodeFormatterConnection + + """Get an analyzer from its shortcode.""" + analyzer( + """Shortcode of the analyzer you'd like to get.""" + shortcode: String! 
+ ): Analyzer + + """Get all analyzers available on DeepSource.""" + analyzers(offset: Int, before: String, after: String, first: Int, last: Int): AnalyzerConnection + + """Fetch an AnalysisRun object from it's UID or commit OID.""" + run( + """UID of the Analysis Run you want to get.""" + runUid: UUID + + """Commit OID of the Analysis Run you want to get.""" + commitOid: String + ): AnalysisRun! + + """ + An account on DeepSource (individual or team). A user can add multiple accounts from multiple VCS providers. + """ + account( + """The login or username to lookup the account by.""" + login: String! + + """VCS Provider of the account.""" + vcsProvider: VCSProvider! + ): Account + node( + """The ID of the object""" + id: ID! + ): MaskPrimaryKeyNode +} + +"""A transformer on DeepSource.""" +type Transformer implements MaskPrimaryKeyNode { + analyzer: Analyzer + exampleConfig: String + + """Name of the tool.""" + name: String! + + """Unique identifier for this tool globally.""" + shortcode: String! + + """Verbose description, written in Markdown.""" + description: String! + + """The ID of the object""" + id: ID! + logo: String +} + +type TransformerConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [TransformerEdge]! + totalCount: Int +} + +"""A Relay edge containing a `Transformer` and its cursor.""" +type TransformerEdge { + """The item at the end of the edge""" + node: Transformer + + """A cursor for use in pagination""" + cursor: String! +} + +"""The DeepSource installation.""" +type Installation implements MaskPrimaryKeyNode { + """The name of the installation.""" + name: String! + + """The ID of the object""" + id: ID! + + """The logo URL of the installation.""" + logo: String! +} + +"""A code formatter on DeepSource.""" +type CodeFormatter implements MaskPrimaryKeyNode { + analyzer: Analyzer + exampleConfig: String + + """Name of the tool.""" + name: String! + + """Unique identifier for this tool globally.""" + shortcode: String! + + """Verbose description, written in Markdown.""" + description: String! + + """The ID of the object""" + id: ID! + logo: String +} + +type CodeFormatterConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [CodeFormatterEdge]! + totalCount: Int +} + +"""A Relay edge containing a `CodeFormatter` and its cursor.""" +type CodeFormatterEdge { + """The item at the end of the edge""" + node: CodeFormatter + + """A cursor for use in pagination""" + cursor: String! +} + +type Mutation { + """ + Suppress an issue from an Analyzer on the team level, affecting all repositories. + """ + suppressIssueForTeam(input: SuppressIssueForTeamInput!): SuppressIssueForTeamPayload + + """ + Remove a suppressed issue from an Analyzer on the team level, affecting all repositories. + """ + unsuppressIssueForTeam(input: UnsuppressIssueForTeamInput!): UnsuppressIssueForTeamPayload + + """Update the threshold for a metric in a repository.""" + setRepositoryMetricThreshold(input: SetRepositoryMetricThresholdInput!): SetRepositoryMetricThresholdPayload + + """Regenerate a repository's DSN.""" + regenerateRepositoryDSN(input: RegenerateRepositoryDSNInput!): RegenerateRepositoryDSNPayload + + """ + Update a repository's default branch for baseline. If the repository is activated, this action will trigger a new analysis. Only available to users with `WRITE` permission on the repository. 
+ """ + updateRepositoryDefaultBranch(input: UpdateRepositoryDefaultBranchInput!): UpdateRepositoryDefaultBranchPayload + + """Update the configuration for an issue category in a repository.""" + updateRepositoryIssueCategorySetting(input: UpdateRepositoryIssueCategorySettingInput!): UpdateRepositoryIssueCategorySettingPayload + + """Update the configuration for an issue priority in a repository.""" + updateRepositoryIssuePrioritySetting(input: UpdateRepositoryIssuePrioritySettingInput!): UpdateRepositoryIssuePrioritySettingPayload + + """Update the configuration for a metric in a repository.""" + updateRepositoryMetricSetting(input: UpdateRepositoryMetricSettingInput!): UpdateRepositoryMetricSettingPayload + + """ + Activate a repository. Only available to users with `WRITE` permission on the repository. + """ + activateRepository(input: ActivateRepositoryInput!): ActivateRepositoryPayload + + """ + Deactivate a repository. Only available to users with `WRITE` permission on the repository. + """ + deactivateRepository(input: DeactivateRepositoryInput!): DeactivateRepositoryPayload + revokeToken(refreshToken: String): Revoke + verifyToken(token: String): Verify + refreshToken(refreshToken: String): Refresh + + """ + Generates a unique device verification code and an end-user code that are + valid for a limited time. + """ + registerDevice(input: RegisterDeviceInput!): RegisterDevicePayload + + """ + Validates the device code provided and responds with the user's PAT. + If there's no PAT associated with the user, create one. If the user + has no access, return an error. Otherwise, indicate the client to keep + polling. + """ + requestPatWithDeviceCode(input: RequestPATWithDeviceCodeInput!): RequestPATWithDeviceCodePayload + refreshPat: RefreshPAT + revokePat: RevokePAT +} + +""" +Suppress an issue from an Analyzer on the team level, affecting all repositories. +""" +type SuppressIssueForTeamPayload { + ok: Boolean + clientMutationId: String +} + +input SuppressIssueForTeamInput { + """The issue's shortcode.""" + issueShortcode: String! + + """The login or username of the account/team.""" + login: String! + + """The VCS provider of the account/team.""" + vcsProvider: VCSProvider! + clientMutationId: String +} + +""" +Remove a suppressed issue from an Analyzer on the team level, affecting all repositories. +""" +type UnsuppressIssueForTeamPayload { + ok: Boolean + clientMutationId: String +} + +input UnsuppressIssueForTeamInput { + """The issue's shortcode.""" + issueShortcode: String! + + """The login or username of the account/team.""" + login: String! + + """The VCS provider of the account/team.""" + vcsProvider: VCSProvider! + clientMutationId: String +} + +"""Update the threshold for a metric in a repository.""" +type SetRepositoryMetricThresholdPayload { + ok: Boolean + clientMutationId: String +} + +input SetRepositoryMetricThresholdInput { + """GraphQL node ID of the repository.""" + repositoryId: ID! + + """Metric shortcode to update the threshold for.""" + metricShortcode: MetricShortcode! + + """The key of the metric you want to update the threshold for.""" + metricKey: MetricKey! + + """Threshold value to set. Can be null.""" + thresholdValue: Int + clientMutationId: String +} + +type RegenerateRepositoryDSNPayload { + """The new DSN for the repository.""" + dsn: String! + clientMutationId: String +} + +input RegenerateRepositoryDSNInput { + """GraphQL node ID of the repository.""" + repositoryId: ID! 
+ clientMutationId: String +} + +type UpdateRepositoryDefaultBranchPayload { + ok: Boolean! + repository: Repository! + clientMutationId: String +} + +input UpdateRepositoryDefaultBranchInput { + """GraphQL node ID of the repository.""" + id: ID! + + """Default branch for analysis on the repository.""" + defaultBranchName: String! + clientMutationId: String +} + +type UpdateRepositoryIssueCategorySettingPayload { + ok: Boolean! + clientMutationId: String +} + +input UpdateRepositoryIssueCategorySettingInput { + """The repository's ID.""" + repositoryId: ID! + + """The issue category you want to update.""" + issueCategory: IssueCategory! + + """ + Whether issues of given category are enabled for reporting in the repository. + """ + isReported: Boolean! + + """ + Whether to fail checks when occurrence(s) of issues of given category are found in the repository. An issue category can only be marked to fail a check if it is enabled for reporting. + """ + canFailCheck: Boolean! + clientMutationId: String +} + +type UpdateRepositoryIssuePrioritySettingPayload { + ok: Boolean! + clientMutationId: String +} + +input UpdateRepositoryIssuePrioritySettingInput { + """The repository's ID.""" + repositoryId: ID! + + """The issue priority you want to update.""" + issuePriorityType: IssuePriorityType! + + """ + Whether issues of given priority are enabled for reporting in the repository. + """ + isReported: Boolean! + + """ + Whether to fail checks when occurrence(s) of issues of given priority are found in the repository. + """ + canFailCheck: Boolean! + clientMutationId: String +} + +type UpdateRepositoryMetricSettingPayload { + ok: Boolean! + clientMutationId: String +} + +input UpdateRepositoryMetricSettingInput { + """The repository's ID.""" + repositoryId: ID! + + """The metric to update.""" + metricShortcode: MetricShortcode! + + """Whether the metric is enabled for reporting in the repository.""" + isReported: Boolean! + + """Whether to fail checks when the metric does not meet the threshold.""" + isThresholdEnforced: Boolean! + clientMutationId: String +} + +type ActivateRepositoryPayload { + """Whether the repository has been activated successfully""" + ok: Boolean! + clientMutationId: String +} + +input ActivateRepositoryInput { + """The repository's ID.""" + repositoryId: ID! + clientMutationId: String +} + +type DeactivateRepositoryPayload { + """Whether the repository has been deactivated successfully""" + ok: Boolean! + clientMutationId: String +} + +input DeactivateRepositoryInput { + """The repository's ID.""" + repositoryId: ID! + clientMutationId: String +} + +type Revoke { + revoked: Int! +} + +type Verify { + payload: GenericScalar! +} + +""" +The `GenericScalar` scalar type represents a generic +GraphQL scalar value that could be: +String, Boolean, Int, Float, List or Object. +""" +scalar GenericScalar + +type Refresh { + payload: GenericScalar! + refreshExpiresIn: Int! + token: String! + refreshToken: String! +} + +""" +Generates a unique device verification code and an end-user code that are +valid for a limited time. 
+""" +type RegisterDevicePayload { + """The device verification code.""" + deviceCode: String + + """The end-user verification code.""" + userCode: String + + """The end-user verification URI.""" + verificationUri: String + + """A verification URI that includes the 'user_code'.""" + verificationUriComplete: String + + """The lifetime in seconds of the 'device_code' and 'user_code'.""" + expiresIn: Int + + """ + The minimum amount of time in seconds that the client should wait between polling requests to the token endpoint. + """ + interval: Int + clientMutationId: String +} + +input RegisterDeviceInput { + deviceType: DeviceType = CLI + clientMutationId: String +} + +"""The device type that is being registered.""" +enum DeviceType { + CLI + IDE +} + +""" +Validates the device code provided and responds with the user's PAT. +If there's no PAT associated with the user, create one. If the user +has no access, return an error. Otherwise, indicate the client to keep +polling. +""" +type RequestPATWithDeviceCodePayload { + """The personal access token corresponding to the device_code""" + token: String + + """Expiry of the token, in unix time""" + expiry: DateTime + user: User + clientMutationId: String +} + +input RequestPATWithDeviceCodeInput { + deviceCode: String! + description: String + clientMutationId: String +} + +type RefreshPAT { + """The personal access token corresponding to the device_code""" + token: String + + """Expiry of the token, in unix time""" + expiry: DateTime + user: User +} + +type RevokePAT { + """Indication whether revoking of personal access token was successful""" + ok: Boolean +} \ No newline at end of file diff --git a/scripts/gen-completions.sh b/scripts/gen-completions.sh deleted file mode 100755 index 7ab14c68..00000000 --- a/scripts/gen-completions.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e -rm -rf completions -mkdir completions - -# Generate completion using the in-built cobra completion command -for shell in bash zsh fish; do - go run cmd/deepsource/main.go completion "$shell" > "completions/deepsource.$shell" -done diff --git a/scripts/install.sh.template b/scripts/install.sh.template new file mode 100644 index 00000000..e4d41c5c --- /dev/null +++ b/scripts/install.sh.template @@ -0,0 +1,115 @@ +#!/bin/sh +# DeepSource CLI installer +# Usage: curl -fsSL https://DOMAIN/install | sh + +set -e + +INSTALL_DIR="${DEEPSOURCE_INSTALL_DIR:-}" +BASE_URL="__BASE_URL__" +BINARY_NAME="__BINARY_NAME__" + +pass() { printf '\033[0;32m✓\033[0m %s\n' "$1"; } +fail() { printf '\033[0;31m✗ %s\033[0m\n' "$1" >&2; exit 1; } +info() { printf '\033[0;36m→\033[0m %s\n' "$1"; } + +detect_os() { + case "$(uname -s)" in + Linux*) echo "linux" ;; + Darwin*) echo "darwin" ;; + *) fail "Unsupported OS: $(uname -s)" ;; + esac +} + +detect_arch() { + case "$(uname -m)" in + x86_64|amd64) echo "amd64" ;; + arm64|aarch64) echo "arm64" ;; + *) fail "Unsupported architecture: $(uname -m)" ;; + esac +} + +main() { + command -v curl >/dev/null 2>&1 || fail "curl is required but not found" + command -v tar >/dev/null 2>&1 || fail "tar is required but not found" + command -v sha256sum >/dev/null 2>&1 || command -v shasum >/dev/null 2>&1 || fail "sha256sum or shasum is required" + pass "Dependencies verified" + + OS="$(detect_os)" + ARCH="$(detect_arch)" + pass "Platform: ${OS}/${ARCH}" + + MANIFEST="$(curl -fsSL "${BASE_URL}/manifest.json" | tr -d ' \n\r\t')" + VERSION="$(printf '%s' "$MANIFEST" | grep -o '"version":"[^"]*"' | head -1 | cut -d'"' -f4)" + + if [ -z "$VERSION" ]; then + 
fail "Failed to determine latest version from manifest" + fi + + pass "Version: v${VERSION}" + + PLATFORM_KEY="${OS}_${ARCH}" + ARCHIVE="$(printf '%s' "$MANIFEST" | grep -o "\"${PLATFORM_KEY}\":{[^}]*}" | grep -o '"archive":"[^"]*"' | cut -d'"' -f4)" + EXPECTED_SHA="$(printf '%s' "$MANIFEST" | grep -o "\"${PLATFORM_KEY}\":{[^}]*}" | grep -o '"sha256":"[^"]*"' | cut -d'"' -f4)" + + if [ -z "$ARCHIVE" ]; then + fail "No build available for ${OS}/${ARCH}" + fi + + ARCHIVE_URL="${BASE_URL}/build/${ARCHIVE}" + TMPDIR="$(mktemp -d)" + trap 'rm -rf "$TMPDIR"' EXIT + + curl -fsSL -o "${TMPDIR}/${ARCHIVE}" "$ARCHIVE_URL" + pass "Downloaded" + + if command -v sha256sum >/dev/null 2>&1; then + ACTUAL_SHA="$(sha256sum "${TMPDIR}/${ARCHIVE}" | awk '{print $1}')" + else + ACTUAL_SHA="$(shasum -a 256 "${TMPDIR}/${ARCHIVE}" | awk '{print $1}')" + fi + + if [ "$ACTUAL_SHA" != "$EXPECTED_SHA" ]; then + fail "Checksum mismatch: expected ${EXPECTED_SHA}, got ${ACTUAL_SHA}" + fi + + pass "Checksum verified" + + # Determine install directory + if [ -n "$INSTALL_DIR" ]; then + BIN_DIR="$INSTALL_DIR" + elif [ "$(id -u)" = "0" ]; then + BIN_DIR="/usr/local/bin" + else + BIN_DIR="${HOME}/.local/bin" + fi + + mkdir -p "$BIN_DIR" + + tar -xzf "${TMPDIR}/${ARCHIVE}" -C "$TMPDIR" + install -m 755 "${TMPDIR}/${BINARY_NAME}" "${BIN_DIR}/${BINARY_NAME}" + + # macOS: clear quarantine and re-sign + if [ "$OS" = "darwin" ]; then + xattr -dr com.apple.quarantine "${BIN_DIR}/${BINARY_NAME}" 2>/dev/null || true + codesign --force --sign - "${BIN_DIR}/${BINARY_NAME}" 2>/dev/null || true + fi + + pass "Installed to ${BIN_DIR}/${BINARY_NAME}" + + echo "" + + # Check if install dir is in PATH + case ":${PATH}:" in + *":${BIN_DIR}:"*) + info "Run '${BINARY_NAME}' to get started" + ;; + *) + info "Add ${BIN_DIR} to your PATH:" + printf ' export PATH="%s:$PATH"\n' "$BIN_DIR" + echo "" + info "Then run '${BINARY_NAME}' to get started" + ;; + esac +} + +main diff --git a/utils/fetch_analyzers_transformers.go b/utils/fetch_analyzers_transformers.go deleted file mode 100644 index 2805459a..00000000 --- a/utils/fetch_analyzers_transformers.go +++ /dev/null @@ -1,84 +0,0 @@ -package utils - -import ( - "context" - - "github.com/deepsourcelabs/cli/deepsource" - "github.com/deepsourcelabs/cli/deepsource/analyzers" - "github.com/deepsourcelabs/cli/deepsource/transformers" -) - -type DeepSourceAnalyzersData struct { - AnalyzerNames []string - AnalyzerShortcodes []string - AnalyzersMap map[string]string // Map for {analyzer name : shortcode} - AnalyzersMeta []string - AnalyzersMetaMap map[string]string // Map for {analyzer name: analyzer meta-schema} -} - -type DeepSourceTransformersData struct { - TransformerNames []string - TransformerShortcodes []string - TransformerMap map[string]string // Map for {transformer name:shortcode} -} - -var ( - AnalyzersData DeepSourceAnalyzersData - TransformersData DeepSourceTransformersData -) - -var ( - analyzersAPIResponse []analyzers.Analyzer - transformersAPIResponse []transformers.Transformer -) - -// Get the list of all the supported analyzers and transformers with -// their corresponding data like shortcode, metaschema etc. 
-func GetAnalyzersAndTransformersData(ctx context.Context, deepsource deepsource.Client) (err error) { - // Get supported analyzers and transformers data - AnalyzersData.AnalyzersMap = make(map[string]string) - TransformersData.TransformerMap = make(map[string]string) - - analyzersAPIResponse, err = deepsource.GetSupportedAnalyzers(ctx) - if err != nil { - return err - } - - transformersAPIResponse, err = deepsource.GetSupportedTransformers(ctx) - if err != nil { - return err - } - parseSDKResponse() - return nil -} - -// Parses the SDK response of analyzers and transformers data into the format required -// by the validator and generator package -func parseSDKResponse() { - analyzersMap := make(map[string]string) - analyzersMetaMap := make(map[string]string) - transformersMap := make(map[string]string) - - for _, analyzer := range analyzersAPIResponse { - analyzersMap[analyzer.Name] = analyzer.Shortcode - analyzersMetaMap[analyzer.Shortcode] = analyzer.MetaSchema - - AnalyzersData = DeepSourceAnalyzersData{ - AnalyzerNames: append(AnalyzersData.AnalyzerNames, analyzer.Name), - AnalyzerShortcodes: append(AnalyzersData.AnalyzerShortcodes, analyzer.Shortcode), - AnalyzersMap: analyzersMap, - AnalyzersMeta: append(AnalyzersData.AnalyzersMeta, analyzer.MetaSchema), - AnalyzersMetaMap: analyzersMetaMap, - } - } - - for _, transformer := range transformersAPIResponse { - transformersMap[transformer.Name] = transformer.Shortcode - - TransformersData = DeepSourceTransformersData{ - TransformerNames: append(TransformersData.TransformerNames, transformer.Name), - TransformerShortcodes: append(TransformersData.TransformerShortcodes, transformer.Shortcode), - TransformerMap: transformersMap, - } - } -}
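The schema above deprecates `transformer` and `transformers` in favor of `codeFormatter` and `codeFormatters`. A minimal sketch of querying the new connection with plain curl follows; the endpoint URL and the bearer-token `Authorization` header are illustrative assumptions and are not part of this changeset, while the query fields themselves come from the schema in this diff.

#!/bin/sh
# Sketch: list code formatters through the new `codeFormatters` connection.
# ASSUMPTIONS: the endpoint URL and the Authorization header format are
# illustrative; only the selected fields are taken from the schema above.
API_URL="${DEEPSOURCE_API_URL:-https://api.deepsource.io/graphql/}"
TOKEN="${DEEPSOURCE_TOKEN:?set DEEPSOURCE_TOKEN to a personal access token}"

curl -fsSL "$API_URL" \
  -H "Authorization: Bearer ${TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"query":"{ codeFormatters(first: 10) { totalCount edges { node { name shortcode } } } }"}'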
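The `registerDevice` and `requestPatWithDeviceCode` mutations describe a device-authorization style login for the CLI: register a device, show the user a code and verification URL, then poll until a personal access token is issued. The sketch below walks that flow with curl and jq under stated assumptions (endpoint URL, an unauthenticated registration call, jq available); a real client would honor the returned `interval` and `expiresIn` values instead of a fixed sleep.

#!/bin/sh
# Sketch of the device-code login flow defined by the mutations above.
# ASSUMPTIONS: endpoint URL, unauthenticated registerDevice call, and jq
# availability are illustrative; error handling is deliberately minimal.
API_URL="${DEEPSOURCE_API_URL:-https://api.deepsource.io/graphql/}"

# 1. Register the device and show the user where to confirm it.
REG="$(curl -fsSL "$API_URL" -H 'Content-Type: application/json' \
  -d '{"query":"mutation { registerDevice(input: {deviceType: CLI}) { deviceCode userCode verificationUri interval expiresIn } }"}')"
DEVICE_CODE="$(printf '%s' "$REG" | jq -r '.data.registerDevice.deviceCode')"
printf 'Open %s and enter code %s\n' \
  "$(printf '%s' "$REG" | jq -r '.data.registerDevice.verificationUri')" \
  "$(printf '%s' "$REG" | jq -r '.data.registerDevice.userCode')"

# 2. Poll until the user has confirmed and a PAT is returned.
while :; do
  RESP="$(curl -fsSL "$API_URL" -H 'Content-Type: application/json' \
    -d "{\"query\":\"mutation { requestPatWithDeviceCode(input: {deviceCode: \\\"${DEVICE_CODE}\\\"}) { token expiry } }\"}")"
  PAT="$(printf '%s' "$RESP" | jq -r '.data.requestPatWithDeviceCode.token // empty')"
  [ -n "$PAT" ] && break
  sleep 5   # a real client should use the `interval` returned by registerDevice
done
echo "Personal access token issued; store it securely."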
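Most of the new repository mutations share the same shape: an input object carrying the repository's GraphQL node ID plus a few scalar fields, and a payload with an `ok` flag. A hedged example for `updateRepositoryDefaultBranch` is below; the node ID is a placeholder, the endpoint and auth header are assumptions, and per the docstring the call requires `WRITE` permission and may trigger a fresh analysis on an activated repository.

#!/bin/sh
# Sketch: change a repository's default analysis branch.
# ASSUMPTIONS: endpoint and auth header are illustrative; REPO_ID is a
# placeholder node ID that would normally come from the `repository` query.
API_URL="${DEEPSOURCE_API_URL:-https://api.deepsource.io/graphql/}"
TOKEN="${DEEPSOURCE_TOKEN:?set DEEPSOURCE_TOKEN to a personal access token}"
REPO_ID="PLACEHOLDER_NODE_ID"   # hypothetical value for illustration only

curl -fsSL "$API_URL" \
  -H "Authorization: Bearer ${TOKEN}" \
  -H "Content-Type: application/json" \
  -d "{\"query\":\"mutation { updateRepositoryDefaultBranch(input: {id: \\\"${REPO_ID}\\\", defaultBranchName: \\\"main\\\"}) { ok } }\"}"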
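The new install.sh.template detects the platform, downloads the matching archive listed in manifest.json, verifies its SHA-256 against the manifest, and installs to /usr/local/bin when run as root or to ~/.local/bin otherwise, with DEEPSOURCE_INSTALL_DIR as an override. Typical invocations look like the following; DOMAIN is kept as a placeholder exactly as in the script's own usage comment, since the concrete host and binary name are filled in when the __BASE_URL__ and __BINARY_NAME__ template markers are rendered.

# Default install (root: /usr/local/bin, otherwise: ~/.local/bin)
curl -fsSL https://DOMAIN/install | sh

# Install into a custom directory instead
curl -fsSL https://DOMAIN/install | DEEPSOURCE_INSTALL_DIR="$HOME/bin" sh

# If the target directory is not already on PATH, the script prints the
# line to add, e.g.:
#   export PATH="$HOME/.local/bin:$PATH"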