diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
new file mode 100644
index 00000000..87494596
--- /dev/null
+++ b/.github/workflows/actions.yml
@@ -0,0 +1,34 @@
+
+name: Wait for Other Runs testing
+
+on:
+  push:
+
+permissions:
+  actions: read
+  contents: read
+
+jobs:
+  scan-secrets:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-go@v5
+        with:
+          go-version: '1.22'
+      - name: Wait for other runs on same commit
+        working-directory: src/pipeleak
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_REPOSITORY: ${{ github.repository }}
+          GITHUB_SHA: ${{ github.sha }}
+          GITHUB_RUN_ID: ${{ github.run_id }}
+        run: go run main.go gh action
+  dummy-wait:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Wait 20 seconds, echo test, then wait 2 more minutes
+        run: |
+          sleep 20
+          echo test
+          sleep 120
\ No newline at end of file
diff --git a/src/pipeleak/cmd/github/action.go b/src/pipeleak/cmd/github/action.go
new file mode 100644
index 00000000..0f01298d
--- /dev/null
+++ b/src/pipeleak/cmd/github/action.go
@@ -0,0 +1,150 @@
+package github
+
+import (
+	"context"
+	"os"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/CompassSecurity/pipeleak/helper"
+	"github.com/CompassSecurity/pipeleak/scanner"
+	"github.com/google/go-github/v69/github"
+	"github.com/rs/zerolog/log"
+	"github.com/spf13/cobra"
+)
+
+func NewActionScanCmd() *cobra.Command {
+	scanActionCmd := &cobra.Command{
+		Use:     "action",
+		Short:   "Scan all workflow runs of the current commit from within a running action",
+		Long:    `Scan GitHub Actions workflow runs and artifacts for secrets`,
+		Example: `GITHUB_TOKEN=$GH_TOKEN pipeleak gh action`,
+		Run:     ScanAction,
+	}
+
+	scanActionCmd.PersistentFlags().BoolVarP(&options.Verbose, "verbose", "v", false, "Verbose logging")
+
+	return scanActionCmd
+}
+
+func ScanAction(cmd *cobra.Command, args []string) {
+	options.HttpClient = helper.GetPipeleakHTTPClient()
+	helper.SetLogLevel(options.Verbose)
+	scanner.InitRules(options.ConfidenceFilter)
+	scanWorkflowRuns()
+	log.Info().Msg("Scan Finished, Bye Bye 🏳️‍🌈🔥")
+}
+
+func scanWorkflowRuns() {
+	log.Info().Msg("Scanning GitHub Actions workflow runs for secrets")
+	options.Context = context.WithValue(context.Background(), github.BypassRateLimitCheck, true)
+
+	var wg sync.WaitGroup
+	scannedRuns := make(map[int64]struct{})
+
+	token := os.Getenv("GITHUB_TOKEN")
+	if token == "" {
+		log.Fatal().Msg("GITHUB_TOKEN not set")
+	}
+
+	client := setupClient(token)
+	options.Client = client
+
+	repoFull := os.Getenv("GITHUB_REPOSITORY")
+	if repoFull == "" {
+		log.Fatal().Msg("GITHUB_REPOSITORY not set")
+	}
+
+	parts := strings.Split(repoFull, "/")
+	if len(parts) != 2 {
+		log.Fatal().Str("repository", repoFull).Msg("invalid GITHUB_REPOSITORY")
+	}
+
+	owner, repo := parts[0], parts[1]
+	log.Info().Str("owner", owner).Str("repo", repo).Msg("Repository to scan")
+
+	repository, _, err := client.Repositories.Get(options.Context, owner, repo)
+	if err != nil {
+		log.Fatal().Err(err).Msg("Failed to fetch repository")
+	}
+
+	sha := os.Getenv("GITHUB_SHA")
+	if sha == "" {
+		log.Fatal().Msg("GITHUB_SHA not set")
+	} else {
+		log.Info().Str("sha", sha).Msg("Current commit sha")
+	}
+
+	runIDStr := os.Getenv("GITHUB_RUN_ID")
+	if runIDStr == "" {
+		log.Fatal().Msg("GITHUB_RUN_ID not set")
+	} else {
+		log.Info().Str("runID", runIDStr).Msg("Current run ID")
+	}
+
+	currentRunID, _ := strconv.ParseInt(runIDStr, 10, 64)
+	opts := &github.ListWorkflowRunsOptions{
+		ListOptions: github.ListOptions{PerPage: 100},
+		HeadSHA:     sha,
+	}
+
+	for {
+		allRunsCompleted := true
+		opts.Page = 0
+
+		for {
+			runs, resp, err := client.Actions.ListRepositoryWorkflowRuns(options.Context, owner, repo, opts)
+
+			if err != nil {
+				log.Fatal().Stack().Err(err).Msg("Failed listing workflow runs")
+			}
+
+			log.Info().Int("count", len(runs.WorkflowRuns)).Msg("Fetched workflow runs")
+
+			for _, run := range runs.WorkflowRuns {
+				status := run.GetStatus()
+				if status != "completed" {
+					allRunsCompleted = false
+				}
+
+				if run.GetID() != currentRunID {
+					log.Info().Int64("run", run.GetID()).Str("status", status).Str("name", run.GetName()).Str("url", *run.HTMLURL).Msg("Workflow run")
+
+					if status == "completed" {
+						if _, scanned := scannedRuns[run.GetID()]; !scanned {
+							scannedRuns[run.GetID()] = struct{}{}
+							wg.Add(1)
+							go func(runCopy *github.WorkflowRun) {
+								defer wg.Done()
+								log.Warn().Int64("run", runCopy.GetID()).Str("status", runCopy.GetStatus()).Str("name", runCopy.GetName()).Str("url", *runCopy.HTMLURL).Msg("Running scan for workflow run")
+								scanRun(client, repository, runCopy)
+							}(run)
+						}
+					}
+				}
+			}
+
+			if resp.NextPage == 0 {
+				break
+			}
+			opts.Page = resp.NextPage
+		}
+
+		if allRunsCompleted {
+			log.Info().Msg("⏳ Waiting for any remaining scans to finish...")
+			break
+		} else {
+			log.Info().Msg("⏳ Some runs are still running")
+			time.Sleep(3 * time.Second)
+		}
+	}
+
+	wg.Wait()
+}
+
+func scanRun(client *github.Client, repo *github.Repository, workflowRun *github.WorkflowRun) {
+	downloadWorkflowRunLog(client, repo, workflowRun)
+	listArtifacts(client, workflowRun)
+}
diff --git a/src/pipeleak/cmd/github/github.go b/src/pipeleak/cmd/github/github.go
index 802fdb77..85d98755 100644
--- a/src/pipeleak/cmd/github/github.go
+++ b/src/pipeleak/cmd/github/github.go
@@ -12,6 +12,7 @@ func NewGitHubRootCmd() *cobra.Command {
 	}
 
 	ghCmd.AddCommand(NewScanCmd())
+	ghCmd.AddCommand(NewActionScanCmd())
 
 	return ghCmd
 }
diff --git a/src/pipeleak/cmd/github/helper.go b/src/pipeleak/cmd/github/helper.go
new file mode 100644
index 00000000..c9d194f9
--- /dev/null
+++ b/src/pipeleak/cmd/github/helper.go
@@ -0,0 +1,282 @@
+package github
+
+import (
+	"archive/zip"
+	"bytes"
+	"context"
+	"io"
+	"net/http"
+	"time"
+
+	"github.com/CompassSecurity/pipeleak/scanner"
+	"github.com/gofri/go-github-ratelimit/v2/github_ratelimit"
+	"github.com/gofri/go-github-ratelimit/v2/github_ratelimit/github_primary_ratelimit"
+	"github.com/gofri/go-github-ratelimit/v2/github_ratelimit/github_secondary_ratelimit"
+	"github.com/google/go-github/v69/github"
+	"github.com/h2non/filetype"
+	"github.com/rs/zerolog/log"
+	"github.com/wandb/parallel"
+)
+
+type GitHubScanOptions struct {
+	AccessToken            string
+	Verbose                bool
+	ConfidenceFilter       []string
+	MaxScanGoRoutines      int
+	TruffleHogVerification bool
+	MaxWorkflows           int
+	Organization           string
+	Owned                  bool
+	User                   string
+	Public                 bool
+	SearchQuery            string
+	Artifacts              bool
+	Context                context.Context
+	Client                 *github.Client
+	HttpClient             *http.Client
+}
+
+var options = GitHubScanOptions{}
+
+func setupClient(accessToken string) *github.Client {
+	rateLimiter := github_ratelimit.New(nil,
+		github_primary_ratelimit.WithLimitDetectedCallback(func(ctx *github_primary_ratelimit.CallbackContext) {
+			resetTime := ctx.ResetTime.Add(time.Duration(time.Second * 30))
+			log.Info().Str("category", string(ctx.Category)).Time("reset", resetTime).Msg("Primary rate limit detected, will resume automatically")
+			time.Sleep(time.Until(resetTime))
+			log.Info().Str("category", string(ctx.Category)).Msg("Resuming")
+		}),
+		github_secondary_ratelimit.WithLimitDetectedCallback(func(ctx *github_secondary_ratelimit.CallbackContext) {
+			resetTime := ctx.ResetTime.Add(time.Duration(time.Second * 30))
+			log.Info().Time("reset", resetTime).Dur("totalSleep", *ctx.TotalSleepTime).Msg("Secondary rate limit detected, will resume automatically")
+			time.Sleep(time.Until(resetTime))
+			log.Info().Msg("Resuming")
+		}),
+	)
+
+	return github.NewClient(&http.Client{Transport: rateLimiter}).WithAuthToken(accessToken)
+}
+
+func downloadWorkflowRunLog(client *github.Client, repo *github.Repository, workflowRun *github.WorkflowRun) {
+	logURL, resp, err := client.Actions.GetWorkflowRunLogs(options.Context, *repo.Owner.Login, *repo.Name, *workflowRun.ID, 5)
+
+	if resp == nil {
+		log.Trace().Msg("downloadWorkflowRunLog Empty response")
+		return
+	}
+
+	// already deleted, skip
+	if resp.StatusCode == 410 {
+		log.Debug().Str("workflowRunName", *workflowRun.Name).Msg("Skipped expired")
+		return
+	} else if resp.StatusCode == 404 {
+		return
+	}
+
+	if err != nil {
+		log.Error().Stack().Err(err).Msg("Failed getting workflow run log URL")
+		return
+	}
+
+	log.Trace().Msg("Downloading run log")
+	logs := downloadRunLogZIP(logURL.String())
+	log.Trace().Msg("Finished downloading run log")
+	findings, err := scanner.DetectHits(logs, options.MaxScanGoRoutines, options.TruffleHogVerification)
+	if err != nil {
+		log.Debug().Err(err).Str("workflowRun", *workflowRun.HTMLURL).Msg("Failed detecting secrets")
+		return
+	}
+
+	for _, finding := range findings {
+		log.Warn().Str("confidence", finding.Pattern.Pattern.Confidence).Str("ruleName", finding.Pattern.Pattern.Name).Str("value", finding.Text).Str("workflowRun", *workflowRun.HTMLURL).Msg("HIT")
+	}
+	log.Trace().Msg("Finished scanning run log")
+}
+
+func iterateWorkflowRuns(client *github.Client, repo *github.Repository) {
+	opt := github.ListWorkflowRunsOptions{
+		ListOptions: github.ListOptions{PerPage: 100},
+	}
+	wfCount := 0
+	for {
+		workflowRuns, resp, err := client.Actions.ListRepositoryWorkflowRuns(options.Context, *repo.Owner.Login, *repo.Name, &opt)
+
+		if resp == nil {
+			log.Trace().Msg("Empty response due to rate limit, resume now")
+			continue
+		}
+
+		if resp.StatusCode == 404 {
+			return
+		}
+
+		if err != nil {
+			log.Error().Stack().Err(err).Msg("Failed fetching workflow runs")
+			return
+		}
+
+		for _, workflowRun := range workflowRuns.WorkflowRuns {
+			log.Debug().Str("name", *workflowRun.DisplayTitle).Str("url", *workflowRun.HTMLURL).Msg("Workflow run")
+			downloadWorkflowRunLog(client, repo, workflowRun)
+
+			if options.Artifacts {
+				listArtifacts(client, workflowRun)
+			}
+
+			wfCount = wfCount + 1
+			if wfCount >= options.MaxWorkflows && options.MaxWorkflows > 0 {
+				log.Debug().Str("name", *workflowRun.DisplayTitle).Str("url", *workflowRun.HTMLURL).Msg("Reached MaxWorkflow runs, skip remaining")
+				return
+			}
+		}
+
+		if resp.NextPage == 0 {
+			break
+		}
+		opt.Page = resp.NextPage
+	}
+}
+
+func downloadRunLogZIP(url string) []byte {
+	res, err := options.HttpClient.Get(url)
+	logLines := make([]byte, 0)
+
+	if err != nil {
+		return logLines
+	}
+
+	if res.StatusCode == 200 {
+		body, err := io.ReadAll(res.Body)
+		if err != nil {
+			log.Err(err).Msg("Failed reading response log body")
+			return logLines
+		}
+
+		zipReader, err := zip.NewReader(bytes.NewReader(body), int64(len(body)))
+		if err != nil {
+			log.Err(err).Msg("Failed creating zip reader")
+			return logLines
+		}
+
+		for _, zipFile := range zipReader.File {
+			log.Trace().Str("zipFile", zipFile.Name).Msg("Zip file")
+			unzippedFileBytes, err := readZipFile(zipFile)
+			if err != nil {
log.Err(err).Msg("Failed reading zip file") + continue + } + + logLines = append(logLines, unzippedFileBytes...) + } + } + + return logLines +} + +func readZipFile(zf *zip.File) ([]byte, error) { + f, err := zf.Open() + if err != nil { + return nil, err + } + defer f.Close() + return io.ReadAll(f) +} + +func listArtifacts(client *github.Client, workflowRun *github.WorkflowRun) { + listOpt := github.ListOptions{PerPage: 100} + for { + artifactList, resp, err := client.Actions.ListWorkflowRunArtifacts(options.Context, *workflowRun.Repository.Owner.Login, *workflowRun.Repository.Name, *workflowRun.ID, &listOpt) + if resp == nil { + return + } + + if resp.StatusCode == 404 { + return + } + + if err != nil { + log.Error().Stack().Err(err).Msg("Failed fetching artifacts list") + return + } + + for _, artifact := range artifactList.Artifacts { + log.Debug().Str("name", *artifact.Name).Str("url", *artifact.ArchiveDownloadURL).Msg("Scan") + analyzeArtifact(client, workflowRun, artifact) + } + + if resp.NextPage == 0 { + break + } + listOpt.Page = resp.NextPage + } +} + +func analyzeArtifact(client *github.Client, workflowRun *github.WorkflowRun, artifact *github.Artifact) { + + url, resp, err := client.Actions.DownloadArtifact(options.Context, *workflowRun.Repository.Owner.Login, *workflowRun.Repository.Name, *artifact.ID, 5) + + if resp == nil { + log.Trace().Msg("analyzeArtifact Empty response") + return + } + + // already deleted, skip + if resp.StatusCode == 410 { + log.Debug().Str("workflowRunName", *workflowRun.Name).Msg("Skipped expired artifact") + return + } + + if err != nil { + log.Err(err).Msg("Failed getting artifact download URL") + return + } + + res, err := options.HttpClient.Get(url.String()) + + if err != nil { + log.Err(err).Str("workflow", url.String()).Msg("Failed downloading artifacts zip") + return + } + + if res.StatusCode == 200 { + body, err := io.ReadAll(res.Body) + if err != nil { + log.Err(err).Msg("Failed reading response log body") + return + } + zipListing, err := zip.NewReader(bytes.NewReader(body), int64(len(body))) + if err != nil { + log.Err(err).Str("url", url.String()).Msg("Failed creating zip reader") + return + } + + ctx := options.Context + group := parallel.Limited(ctx, options.MaxScanGoRoutines) + for _, file := range zipListing.File { + group.Go(func(ctx context.Context) { + fc, err := file.Open() + if err != nil { + log.Error().Stack().Err(err).Msg("Unable to open raw artifact zip file") + return + } + + content, err := io.ReadAll(fc) + if err != nil { + log.Error().Stack().Err(err).Msg("Unable to readAll artifact zip file") + return + } + + kind, _ := filetype.Match(content) + // do not scan https://pkg.go.dev/github.com/h2non/filetype#readme-supported-types + if kind == filetype.Unknown { + scanner.DetectFileHits(content, *workflowRun.HTMLURL, *workflowRun.Name, file.Name, "", options.TruffleHogVerification) + } else if filetype.IsArchive(content) { + scanner.HandleArchiveArtifact(file.Name, content, *workflowRun.HTMLURL, *workflowRun.Name, options.TruffleHogVerification) + } + fc.Close() + }) + } + + group.Wait() + } +} diff --git a/src/pipeleak/cmd/github/scan.go b/src/pipeleak/cmd/github/scan.go index 1b6b86ae..188cfccd 100644 --- a/src/pipeleak/cmd/github/scan.go +++ b/src/pipeleak/cmd/github/scan.go @@ -1,50 +1,20 @@ package github import ( - "archive/zip" - "bytes" "context" - "io" - "net/http" "sort" - "time" "github.com/CompassSecurity/pipeleak/helper" "github.com/CompassSecurity/pipeleak/scanner" - 
"github.com/gofri/go-github-ratelimit/v2/github_ratelimit" - "github.com/gofri/go-github-ratelimit/v2/github_ratelimit/github_primary_ratelimit" - "github.com/gofri/go-github-ratelimit/v2/github_ratelimit/github_secondary_ratelimit" "github.com/google/go-github/v69/github" - "github.com/h2non/filetype" "github.com/rs/zerolog" "github.com/rs/zerolog/log" "github.com/spf13/cobra" - "github.com/wandb/parallel" ) -type GitHubScanOptions struct { - AccessToken string - Verbose bool - ConfidenceFilter []string - MaxScanGoRoutines int - TruffleHogVerification bool - MaxWorkflows int - Organization string - Owned bool - User string - Public bool - SearchQuery string - Artifacts bool - Context context.Context - Client *github.Client - HttpClient *http.Client -} - -var options = GitHubScanOptions{} - func NewScanCmd() *cobra.Command { scanCmd := &cobra.Command{ - Use: "scan [no options!]", + Use: "scan", Short: "Scan GitHub Actions", Long: `Scan GitHub Actions workflow runs and artifacts for secrets`, Example: ` @@ -99,25 +69,6 @@ func Scan(cmd *cobra.Command, args []string) { log.Info().Msg("Scan Finished, Bye Bye 🏳️‍🌈🔥") } -func setupClient(accessToken string) *github.Client { - rateLimiter := github_ratelimit.New(nil, - github_primary_ratelimit.WithLimitDetectedCallback(func(ctx *github_primary_ratelimit.CallbackContext) { - resetTime := ctx.ResetTime.Add(time.Duration(time.Second * 30)) - log.Info().Str("category", string(ctx.Category)).Time("reset", resetTime).Msg("Primary rate limit detected, will resume automatically") - time.Sleep(time.Until(resetTime)) - log.Info().Str("category", string(ctx.Category)).Msg("Resuming") - }), - github_secondary_ratelimit.WithLimitDetectedCallback(func(ctx *github_secondary_ratelimit.CallbackContext) { - resetTime := ctx.ResetTime.Add(time.Duration(time.Second * 30)) - log.Info().Time("reset", *ctx.ResetTime).Dur("totalSleep", *ctx.TotalSleepTime).Msg("Secondary rate limit detected, will resume automatically") - time.Sleep(time.Until(resetTime)) - log.Info().Msg("Resuming") - }), - ) - - return github.NewClient(&http.Client{Transport: rateLimiter}).WithAuthToken(accessToken) -} - func scan(client *github.Client) { if options.Owned { log.Info().Msg("Scanning authenticated user's owned repositories actions") @@ -299,131 +250,6 @@ func scanRepositories(client *github.Client) { } } -func iterateWorkflowRuns(client *github.Client, repo *github.Repository) { - opt := github.ListWorkflowRunsOptions{ - ListOptions: github.ListOptions{PerPage: 100}, - } - wfCount := 0 - for { - workflowRuns, resp, err := client.Actions.ListRepositoryWorkflowRuns(options.Context, *repo.Owner.Login, *repo.Name, &opt) - - if resp == nil { - log.Trace().Msg("Empty response due to rate limit, resume now<") - continue - } - - if resp.StatusCode == 404 { - return - } - - if err != nil { - log.Error().Stack().Err(err).Msg("Failed fetching workflow runs") - return - } - - for _, workflowRun := range workflowRuns.WorkflowRuns { - log.Debug().Str("name", *workflowRun.DisplayTitle).Str("url", *workflowRun.HTMLURL).Msg("Workflow run") - downloadWorkflowRunLog(client, repo, workflowRun) - - if options.Artifacts { - listArtifacts(client, workflowRun) - } - - wfCount = wfCount + 1 - if wfCount >= options.MaxWorkflows && options.MaxWorkflows > 0 { - log.Debug().Str("name", *workflowRun.DisplayTitle).Str("url", *workflowRun.HTMLURL).Msg("Reached MaxWorkflow runs, skip remaining") - return - } - } - - if resp.NextPage == 0 { - break - } - opt.Page = resp.NextPage - } -} - -func 
downloadWorkflowRunLog(client *github.Client, repo *github.Repository, workflowRun *github.WorkflowRun) { - logURL, resp, err := client.Actions.GetWorkflowRunLogs(options.Context, *repo.Owner.Login, *repo.Name, *workflowRun.ID, 5) - - if resp == nil { - log.Trace().Msg("downloadWorkflowRunLog Empty response") - return - } - - // already deleted, skip - if resp.StatusCode == 410 { - log.Debug().Str("workflowRunName", *workflowRun.Name).Msg("Skipped expired") - return - } else if resp.StatusCode == 404 { - return - } - - if err != nil { - log.Error().Stack().Err(err).Msg("Failed getting workflow run log URL") - return - } - - log.Trace().Msg("Downloading run log") - logs := downloadRunLogZIP(logURL.String()) - log.Trace().Msg("Finished downloading run log") - findings, err := scanner.DetectHits(logs, options.MaxScanGoRoutines, options.TruffleHogVerification) - if err != nil { - log.Debug().Err(err).Str("workflowRun", *workflowRun.HTMLURL).Msg("Failed detecting secrets") - return - } - - for _, finding := range findings { - log.Warn().Str("confidence", finding.Pattern.Pattern.Confidence).Str("ruleName", finding.Pattern.Pattern.Name).Str("value", finding.Text).Str("workflowRun", *workflowRun.HTMLURL).Msg("HIT") - } - log.Trace().Msg("Finished scannig run log") -} - -func downloadRunLogZIP(url string) []byte { - res, err := options.HttpClient.Get(url) - logLines := make([]byte, 0) - - if err != nil { - return logLines - } - - if res.StatusCode == 200 { - body, err := io.ReadAll(res.Body) - if err != nil { - log.Err(err).Msg("Failed reading response log body") - return logLines - } - - zipReader, err := zip.NewReader(bytes.NewReader(body), int64(len(body))) - if err != nil { - log.Err(err).Msg("Failed creating zip reader") - return logLines - } - - for _, zipFile := range zipReader.File { - log.Trace().Str("zipFile", zipFile.Name).Msg("Zip file") - unzippedFileBytes, err := readZipFile(zipFile) - if err != nil { - log.Err(err).Msg("Failed reading zip file") - continue - } - - logLines = append(logLines, unzippedFileBytes...) 
- } - } - - return logLines -} - -func readZipFile(zf *zip.File) ([]byte, error) { - f, err := zf.Open() - if err != nil { - return nil, err - } - defer f.Close() - return io.ReadAll(f) -} - func identifyNewestPublicProjectId(client *github.Client) int64 { for { listOpts := github.ListOptions{PerPage: 1000} @@ -453,102 +279,3 @@ func identifyNewestPublicProjectId(client *github.Client) int64 { log.Fatal().Msg("Failed finding a CreateEvent and thus no rerpository id") return -1 } - -func listArtifacts(client *github.Client, workflowRun *github.WorkflowRun) { - listOpt := github.ListOptions{PerPage: 100} - for { - artifactList, resp, err := client.Actions.ListWorkflowRunArtifacts(options.Context, *workflowRun.Repository.Owner.Login, *workflowRun.Repository.Name, *workflowRun.ID, &listOpt) - if resp == nil { - return - } - - if resp.StatusCode == 404 { - return - } - - if err != nil { - log.Error().Stack().Err(err).Msg("Failed fetching artifacts list") - return - } - - for _, artifact := range artifactList.Artifacts { - log.Debug().Str("name", *artifact.Name).Str("url", *artifact.ArchiveDownloadURL).Msg("Scan") - analyzeArtifact(client, workflowRun, artifact) - } - - if resp.NextPage == 0 { - break - } - listOpt.Page = resp.NextPage - } -} - -func analyzeArtifact(client *github.Client, workflowRun *github.WorkflowRun, artifact *github.Artifact) { - - url, resp, err := client.Actions.DownloadArtifact(options.Context, *workflowRun.Repository.Owner.Login, *workflowRun.Repository.Name, *artifact.ID, 5) - - if resp == nil { - log.Trace().Msg("analyzeArtifact Empty response") - return - } - - // already deleted, skip - if resp.StatusCode == 410 { - log.Debug().Str("workflowRunName", *workflowRun.Name).Msg("Skipped expired artifact") - return - } - - if err != nil { - log.Err(err).Msg("Failed getting artifact download URL") - return - } - - res, err := options.HttpClient.Get(url.String()) - - if err != nil { - log.Err(err).Str("workflow", url.String()).Msg("Failed downloading artifacts zip") - return - } - - if res.StatusCode == 200 { - body, err := io.ReadAll(res.Body) - if err != nil { - log.Err(err).Msg("Failed reading response log body") - return - } - zipListing, err := zip.NewReader(bytes.NewReader(body), int64(len(body))) - if err != nil { - log.Err(err).Str("url", url.String()).Msg("Failed creating zip reader") - return - } - - ctx := options.Context - group := parallel.Limited(ctx, options.MaxScanGoRoutines) - for _, file := range zipListing.File { - group.Go(func(ctx context.Context) { - fc, err := file.Open() - if err != nil { - log.Error().Stack().Err(err).Msg("Unable to open raw artifact zip file") - return - } - - content, err := io.ReadAll(fc) - if err != nil { - log.Error().Stack().Err(err).Msg("Unable to readAll artifact zip file") - return - } - - kind, _ := filetype.Match(content) - // do not scan https://pkg.go.dev/github.com/h2non/filetype#readme-supported-types - if kind == filetype.Unknown { - scanner.DetectFileHits(content, *workflowRun.HTMLURL, *workflowRun.Name, file.Name, "", options.TruffleHogVerification) - } else if filetype.IsArchive(content) { - scanner.HandleArchiveArtifact(file.Name, content, *workflowRun.HTMLURL, *workflowRun.Name, options.TruffleHogVerification) - } - fc.Close() - }) - } - - group.Wait() - } -}