
Commit

review changes
attiasas committed Nov 2, 2023
1 parent 9090909 commit d1a100a
Showing 4 changed files with 29 additions and 14 deletions.
9 changes: 5 additions & 4 deletions utils/coreutils/techutils.go
@@ -1,7 +1,6 @@
 package coreutils

 import (
-    "encoding/json"
     "fmt"
     "os"
     "path/filepath"
@@ -207,14 +206,16 @@ func detectedTechnologiesListInPath(path string, recursive bool) (technologies [
 // If recursive is true, the search will not be limited to files in the root path.
 // If requestedTechs is empty, all technologies will be checked.
 // If excludePathPattern is not empty, files/directories that match the wildcard pattern will be excluded from the search.
-func DetectTechnologiesDescriptors(path string, recursive bool, requestedTechs []string, requestedDescriptors map[Technology][]string, excludePathPattern string) (technologiesDetected map[Technology]map[string][]string) {
+func DetectTechnologiesDescriptors(path string, recursive bool, requestedTechs []string, requestedDescriptors map[Technology][]string, excludePathPattern string) (technologiesDetected map[Technology]map[string][]string, err error) {
     filesList, err := fspatterns.ListFiles(path, recursive, false, true, true, excludePathPattern)
     if err != nil {
         return
     }
     workingDirectoryToIndicators, excludedTechAtWorkingDir := mapFilesToRelevantWorkingDirectories(filesList, requestedDescriptors)
-    strJson, err := json.MarshalIndent(workingDirectoryToIndicators, "", " ")
-    if errorutils.CheckError(err) == nil && len(workingDirectoryToIndicators) > 0 {
+    var strJson string
+    if strJson, err = GetJsonIndent(workingDirectoryToIndicators); err != nil {
+        return
+    } else if len(workingDirectoryToIndicators) > 0 {
         log.Debug(fmt.Sprintf("mapped %d working directories with indicators/descriptors:\n%s", len(workingDirectoryToIndicators), strJson))
     }
     technologiesDetected = mapWorkingDirectoriesToTechnologies(workingDirectoryToIndicators, excludedTechAtWorkingDir, ToTechnologies(requestedTechs), requestedDescriptors)
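For context, here is a minimal sketch of how a caller might consume the updated DetectTechnologiesDescriptors, which now returns an error alongside the detection map. The standalone program, the nil arguments, and the import path github.com/jfrog/jfrog-cli-core/v2/utils/coreutils are assumptions for illustration; the actual call-site update made in this commit appears in scarunner.go below.

// Hypothetical caller of the new (map, error) signature; import path assumed.
package main

import (
    "fmt"
    "log"

    "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
)

func main() {
    // Scan the current directory recursively, with no technology filter,
    // no extra descriptors, and no exclude pattern.
    detected, err := coreutils.DetectTechnologiesDescriptors(".", true, nil, nil, "")
    if err != nil {
        // The error is now propagated to the caller instead of being swallowed.
        log.Fatal(err)
    }
    for tech, workingDirs := range detected {
        fmt.Printf("%v: %v\n", tech, workingDirs)
    }
}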
11 changes: 11 additions & 0 deletions utils/coreutils/utils.go
@@ -2,6 +2,7 @@ package coreutils

 import (
     "bytes"
+    "encoding/json"
     "errors"
     "fmt"
     "io/fs"
@@ -502,6 +503,16 @@ func parseYesNo(s string, def bool) (ans, valid bool) {
     return false, false
 }

+func GetJsonIndent(o any) (strJson string, err error) {
+    byteJson, err := json.MarshalIndent(o, "", " ")
+    if err != nil {
+        err = errorutils.CheckError(err)
+        return
+    }
+    strJson = string(byteJson)
+    return
+}
+
 func GetCliUserAgent() string {
     if cliUserAgentVersion == "" {
         return cliUserAgentName
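As a usage illustration, a short sketch of how the new GetJsonIndent helper might be called from within the same package; the scanSummary type and logSummary function are hypothetical, and log refers to the jfrog-client-go logger already used in techutils.go.

// Hypothetical example: pretty-print a value as indented JSON before logging it.
type scanSummary struct {
    Technology  string   `json:"technology"`
    WorkingDirs []string `json:"workingDirs"`
}

func logSummary(summary scanSummary) error {
    strJson, err := GetJsonIndent(summary)
    if err != nil {
        // GetJsonIndent already wraps the error with errorutils.CheckError,
        // so it can be returned (or logged) as-is.
        return err
    }
    log.Debug("scan summary:\n" + strJson)
    return nil
}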
20 changes: 11 additions & 9 deletions xray/commands/audit/scarunner.go
@@ -41,12 +41,17 @@ func runScaScan(params *AuditParams, results *xrayutils.Results) (err error) {
     if err != nil {
         return
     }
+
     scans := getScaScansToPreform(currentWorkingDir, params)
     if len(scans) == 0 {
         log.Info("Couldn't determine a package manager or build tool used by this project. Skipping the SCA scan...")
         return
     }
-    logScanInfo(scans)
+    scanInfo, err := coreutils.GetJsonIndent(scans)
+    if err != nil {
+        return
+    }
+    log.Info(fmt.Sprintf("Preforming %d SCA scans:\n%s", len(scans), scanInfo))

     defer func() {
         // Make sure to return to the original working directory, executeScaScan may change it
@@ -70,7 +75,11 @@ func getScaScansToPreform(currentWorkingDir string, params *AuditParams) (scansT
     recursive := len(currentWorkingDir) > 0
     for _, requestedDirectory := range getRequestedDirectoriesToScan(currentWorkingDir, params) {
         // Detect descriptors and technologies in the requested directory.
-        techToWorkingDirs := coreutils.DetectTechnologiesDescriptors(requestedDirectory, recursive, params.Technologies(), getRequestedDescriptors(params), getExcludePattern(params, recursive))
+        techToWorkingDirs, err := coreutils.DetectTechnologiesDescriptors(requestedDirectory, recursive, params.Technologies(), getRequestedDescriptors(params), getExcludePattern(params, recursive))
+        if err != nil {
+            log.Warn("Couldn't detect technologies in", requestedDirectory, "directory.", err.Error())
+            continue
+        }
         // Create scans to preform
         for tech, workingDirs := range techToWorkingDirs {
             if tech == coreutils.Dotnet {
@@ -107,13 +116,6 @@ func getExcludePattern(params *AuditParams, recursive bool) string {
     return fspatterns.PrepareExcludePathPattern(exclusions, clientutils.WildCardPattern, recursive)
 }

-func logScanInfo(scans []*xrayutils.ScaScanResult) {
-    scansJson, err := json.MarshalIndent(scans, "", " ")
-    if err == nil {
-        log.Info(fmt.Sprintf("Preforming %d SCA scans:\n%s", len(scans), string(scansJson)))
-    }
-}
-
 func getRequestedDirectoriesToScan(currentWorkingDir string, params *AuditParams) []string {
     workingDirs := datastructures.MakeSet[string]()
     for _, wd := range params.workingDirs {
3 changes: 2 additions & 1 deletion xray/commands/audit/scarunner_test.go
@@ -256,7 +256,8 @@ func TestGetScaScansToPreform(t *testing.T) {

     for _, test := range tests {
         t.Run(test.name, func(t *testing.T) {
-            result := getScaScansToPreform(test.wd, test.params())
+            result, err := getScaScansToPreform(test.wd, test.params())
+            assert.NoError(t, err)
             for i := range result {
                 sort.Strings(result[i].Descriptors)
                 sort.Strings(test.expected[i].Descriptors)

Check failures reported by GitHub Actions on line 259 in xray/commands/audit/scarunner_test.go:
  • Go-Sec: assignment mismatch: 2 variables but getScaScansToPreform returns 1 value
  • Static-Check: assignment mismatch: 2 variables but getScaScansToPreform returns 1 value (typecheck)
