diff --git a/pkg/engine/inspector.go b/pkg/engine/inspector.go index a22f05bd660..a5bfb77c2b1 100644 --- a/pkg/engine/inspector.go +++ b/pkg/engine/inspector.go @@ -43,6 +43,14 @@ var ErrNoResult = errors.New("query: not result") // ErrInvalidResult - error representing invalid result var ErrInvalidResult = errors.New("query: invalid result format") +// QueryLoader is responsible for loading the queries for the inspector +type QueryLoader struct { + commonLibrary source.RegoLibraries + platformLibraries map[string]source.RegoLibraries + querySum int + QueriesMetadata []model.QueryMetadata +} + // VulnerabilityBuilder represents a function that will build a vulnerability type VulnerabilityBuilder func(ctx *QueryContext, tracker Tracker, v interface{}, detector *detector.DetectLine) (model.Vulnerability, error) @@ -55,7 +63,7 @@ type preparedQuery struct { // Inspector represents a list of compiled queries, a builder for vulnerabilities, an information tracker // a flag to enable coverage and the coverage report if it is enabled type Inspector struct { - queries []*preparedQuery + QueryLoader *QueryLoader vb VulnerabilityBuilder tracker Tracker failedQueries map[string]error @@ -113,16 +121,15 @@ func NewInspector( return nil, errors.Wrap(err, "failed to get library") } platformLibraries := getPlatformLibraries(queriesSource, queries) - opaQueries := prepareQueries(ctx, queries, commonLibrary, platformLibraries, tracker) - failedQueries := make(map[string]error) + queryLoader := prepareQueries(queries, commonLibrary, platformLibraries, tracker) - queriesNumber := sumAllAggregatedQueries(opaQueries) + failedQueries := make(map[string]error) metrics.Metric.Stop() log.Info(). - Msgf("Inspector initialized, number of queries=%d", queriesNumber) + Msgf("Inspector initialized, number of queries=%d", queryLoader.querySum) lineDetector := detector.NewDetectLine(tracker.GetOutputLines()). Add(helm.DetectKindLine{}, model.KindHELM). 
@@ -133,7 +140,7 @@ func NewInspector( log.Info().Msgf("Query execution timeout=%v", queryExecTimeout) return &Inspector{ - queries: opaQueries, + QueryLoader: &queryLoader, vb: vb, tracker: tracker, failedQueries: failedQueries, @@ -165,14 +172,6 @@ func getPlatformLibraries(queriesSource source.QueriesSource, queries []model.Qu return platformLibraries } -func sumAllAggregatedQueries(opaQueries []*preparedQuery) int { - sum := 0 - for _, query := range opaQueries { - sum += query.metadata.Aggregation - } - return sum -} - // Inspect scan files and return the a list of vulnerabilities found on the process func (c *Inspector) Inspect( ctx context.Context, @@ -191,9 +190,20 @@ func (c *Inspector) Inspect( var vulnerabilities []model.Vulnerability vulnerabilities = make([]model.Vulnerability, 0) - for _, query := range c.getQueriesByPlat(platforms) { + queries := c.getQueriesByPlat(platforms) + for i, queryMeta := range queries { currentQuery <- 1 + queryOpa, err := c.QueryLoader.loadQuery(ctx, &queries[i]) + if err != nil { + continue + } + + query := &preparedQuery{ + opaQuery: *queryOpa, + metadata: queryMeta, + } + vuls, err := c.doRun(&QueryContext{ ctx: ctx, scanID: scanID, @@ -228,19 +238,19 @@ func (c *Inspector) Inspect( // LenQueriesByPlat returns the number of queries by platforms func (c *Inspector) LenQueriesByPlat(platforms []string) int { count := 0 - for _, query := range c.queries { - if contains(platforms, query.metadata.Platform) { - c.tracker.TrackQueryExecuting(query.metadata.Aggregation) + for _, query := range c.QueryLoader.QueriesMetadata { + if contains(platforms, query.Platform) { + c.tracker.TrackQueryExecuting(query.Aggregation) count++ } } return count } -func (c *Inspector) getQueriesByPlat(platforms []string) []*preparedQuery { - queries := make([]*preparedQuery, 0) - for _, query := range c.queries { - if contains(platforms, query.metadata.Platform) { +func (c *Inspector) getQueriesByPlat(platforms []string) []model.QueryMetadata { + queries := make([]model.QueryMetadata, 0) + for _, query := range c.QueryLoader.QueriesMetadata { + if contains(platforms, query.Platform) { queries = append(queries, query) } } @@ -414,64 +424,66 @@ func ShouldSkipVulnerability(command model.CommentsCommands, queryID string) boo return false } -func prepareQueries( - ctx context.Context, - queries []model.QueryMetadata, - commonLibrary source.RegoLibraries, - platformLibraries map[string]source.RegoLibraries, - tracker Tracker) []*preparedQuery { - opaQueries := make([]*preparedQuery, 0, len(queries)) +func prepareQueries(queries []model.QueryMetadata, commonLibrary source.RegoLibraries, + platformLibraries map[string]source.RegoLibraries, tracker Tracker) QueryLoader { + // track queries loaded + sum := 0 for _, metadata := range queries { - platformGeneralQuery, ok := platformLibraries[metadata.Platform] - if !ok { - log.Err(errors.New("failed to get platform library")). 
- Msgf("Inspector failed to get library for query: %s, with platform: %s", metadata.Query, metadata.Platform) - continue - } + tracker.TrackQueryLoad(metadata.Aggregation) + sum += metadata.Aggregation + } + return QueryLoader{ + commonLibrary: commonLibrary, + platformLibraries: platformLibraries, + querySum: sum, + QueriesMetadata: queries, + } +} - select { - case <-ctx.Done(): - return nil - default: - var opaQuery rego.PreparedEvalQuery - mergedInputData, err := source.MergeInputData(platformGeneralQuery.LibraryInputData, metadata.InputData) - if err != nil { - log.Debug().Msg("Could not merge platform library input data") - } - mergedInputData, err = source.MergeInputData(commonLibrary.LibraryInputData, mergedInputData) - if err != nil { - log.Debug().Msg("Could not merge common library input data") - } - store := inmem.NewFromReader(bytes.NewBufferString(mergedInputData)) - opaQuery, err = rego.New( - rego.Query(regoQuery), - rego.Module("Common", commonLibrary.LibraryCode), - rego.Module("Generic", platformGeneralQuery.LibraryCode), - rego.Module(metadata.Query, metadata.Content), - rego.Store(store), - rego.UnsafeBuiltins(unsafeRegoFunctions), - ).PrepareForEval(ctx) - - if err != nil { - sentryReport.ReportSentry(&sentryReport.Report{ - Message: fmt.Sprintf("Inspector failed to prepare query for evaluation, query=%s", metadata.Query), - Err: err, - Location: "func NewInspector()", - Query: metadata.Query, - Metadata: metadata.Metadata, - Platform: metadata.Platform, - }, true) - - continue - } +// Load the querie into memory so it can be freed when not used anymore +func (q QueryLoader) loadQuery(ctx context.Context, query *model.QueryMetadata) (*rego.PreparedEvalQuery, error) { + opaQuery := rego.PreparedEvalQuery{} + + platformGeneralQuery, ok := q.platformLibraries[query.Platform] + if !ok { + return nil, errors.New("failed to get platform library") + } + + select { + case <-ctx.Done(): + return nil, nil + default: + mergedInputData, err := source.MergeInputData(platformGeneralQuery.LibraryInputData, query.InputData) + if err != nil { + log.Debug().Msg("Could not merge platform library input data") + } + mergedInputData, err = source.MergeInputData(q.commonLibrary.LibraryInputData, mergedInputData) + if err != nil { + log.Debug().Msg("Could not merge common library input data") + } + store := inmem.NewFromReader(bytes.NewBufferString(mergedInputData)) + opaQuery, err = rego.New( + rego.Query(regoQuery), + rego.Module("Common", q.commonLibrary.LibraryCode), + rego.Module("Generic", platformGeneralQuery.LibraryCode), + rego.Module(query.Query, query.Content), + rego.Store(store), + rego.UnsafeBuiltins(unsafeRegoFunctions), + ).PrepareForEval(ctx) - tracker.TrackQueryLoad(metadata.Aggregation) + if err != nil { + sentryReport.ReportSentry(&sentryReport.Report{ + Message: fmt.Sprintf("Inspector failed to prepare query for evaluation, query=%s", query.Query), + Err: err, + Location: "func NewInspector()", + Query: query.Query, + Metadata: query.Metadata, + Platform: query.Platform, + }, true) - opaQueries = append(opaQueries, &preparedQuery{ - opaQuery: opaQuery, - metadata: metadata, - }) + return nil, err } + + return &opaQuery, nil } - return opaQueries } diff --git a/pkg/engine/inspector_test.go b/pkg/engine/inspector_test.go index c797944e412..5b8b26d3049 100644 --- a/pkg/engine/inspector_test.go +++ b/pkg/engine/inspector_test.go @@ -26,7 +26,6 @@ import ( "github.com/stretchr/testify/require" "github.com/open-policy-agent/opa/cover" - "github.com/open-policy-agent/opa/rego" 
) // TestInspector_EnableCoverageReport tests the functions [EnableCoverageReport()] and all the methods called by them @@ -34,7 +33,7 @@ func TestInspector_EnableCoverageReport(t *testing.T) { log.Logger = log.Output(zerolog.ConsoleWriter{Out: io.Discard}) type fields struct { - queries []*preparedQuery + queryLoader *QueryLoader vb VulnerabilityBuilder tracker Tracker enableCoverageReport bool @@ -48,7 +47,7 @@ func TestInspector_EnableCoverageReport(t *testing.T) { { name: "enable_coverage_report_1", fields: fields{ - queries: []*preparedQuery{}, + queryLoader: &QueryLoader{}, vb: DefaultVulnerabilityBuilder, tracker: &tracker.CITracker{}, enableCoverageReport: false, @@ -60,7 +59,7 @@ func TestInspector_EnableCoverageReport(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := &Inspector{ - queries: tt.fields.queries, + QueryLoader: tt.fields.queryLoader, vb: tt.fields.vb, tracker: tt.fields.tracker, enableCoverageReport: tt.fields.enableCoverageReport, @@ -82,7 +81,7 @@ func TestInspector_GetCoverageReport(t *testing.T) { } type fields struct { - queries []*preparedQuery + queryLoader *QueryLoader vb VulnerabilityBuilder tracker Tracker enableCoverageReport bool @@ -96,7 +95,7 @@ func TestInspector_GetCoverageReport(t *testing.T) { { name: "get_coverage_report_1", fields: fields{ - queries: []*preparedQuery{}, + queryLoader: &QueryLoader{}, vb: DefaultVulnerabilityBuilder, tracker: &tracker.CITracker{}, enableCoverageReport: false, @@ -108,7 +107,7 @@ func TestInspector_GetCoverageReport(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := &Inspector{ - queries: tt.fields.queries, + QueryLoader: tt.fields.queryLoader, vb: tt.fields.vb, tracker: tt.fields.tracker, enableCoverageReport: tt.fields.enableCoverageReport, @@ -127,38 +126,12 @@ func TestInspect(t *testing.T) { //nolint Add(helm.DetectKindLine{}, model.KindHELM). 
Add(docker.DetectKindLine{}, model.KindDOCKER) ctx := context.Background() - opaQuery, _ := rego.New( - rego.Query(regoQuery), - rego.Module("add_instead_of_copy", `package Cx - - CxPolicy [ result ] { - resource := input.document[i].command[name][_] - resource.Cmd == "add" - not tarfileChecker(resource.Value, ".tar") - not tarfileChecker(resource.Value, ".tar.") - - result := { - "documentId": input.document[i].id, - "searchKey": sprintf("{{%s}}", [resource.Original]), - "issueType": "IncorrectValue", - "keyExpectedValue": sprintf("'COPY' %s", [resource.Value[0]]), - "keyActualValue": sprintf("'ADD' %s", [resource.Value[0]]) - } - } - - tarfileChecker(cmdValue, elem) { - contains(cmdValue[_], elem) - }`), - rego.UnsafeBuiltins(unsafeRegoFunctions), - ).PrepareForEval(ctx) - - opaQueries := make([]*preparedQuery, 0, 1) - opaQueries = append(opaQueries, &preparedQuery{ - opaQuery: opaQuery, - metadata: model.QueryMetadata{ - Query: "add_instead_of_copy", - InputData: "{}", - Content: `package Cx + opaQueries := make([]model.QueryMetadata, 0, 1) + opaQueries = append(opaQueries, model.QueryMetadata{ + Query: "add_instead_of_copy", + Platform: "Dockerfile", + InputData: "{}", + Content: `package Cx CxPolicy [ result ] { resource := input.document[i].command[name][_] @@ -178,7 +151,6 @@ func TestInspect(t *testing.T) { //nolint tarfileChecker(cmdValue, elem) { contains(cmdValue[_], elem) }`, - }, }) mockedFileMetadataDocument := map[string]interface{}{ @@ -202,7 +174,7 @@ func TestInspect(t *testing.T) { //nolint } type fields struct { - queries []*preparedQuery + queryLoader QueryLoader vb VulnerabilityBuilder tracker Tracker enableCoverageReport bool @@ -224,7 +196,19 @@ func TestInspect(t *testing.T) { //nolint { name: "TestInspect", fields: fields{ - queries: opaQueries, + queryLoader: QueryLoader{ + QueriesMetadata: opaQueries, + commonLibrary: source.RegoLibraries{ + LibraryCode: "package generic.common", + LibraryInputData: "", + }, + platformLibraries: map[string]source.RegoLibraries{ + "Dockerfile": { + LibraryCode: "package generic.dockerfile", + LibraryInputData: "", + }, + }, + }, vb: DefaultVulnerabilityBuilder, tracker: &tracker.CITracker{}, enableCoverageReport: true, @@ -274,7 +258,19 @@ func TestInspect(t *testing.T) { //nolint { name: "TestInspectExcludeResult", fields: fields{ - queries: opaQueries, + queryLoader: QueryLoader{ + QueriesMetadata: opaQueries, + commonLibrary: source.RegoLibraries{ + LibraryCode: "package generic.common", + LibraryInputData: "", + }, + platformLibraries: map[string]source.RegoLibraries{ + "Dockerfile": { + LibraryCode: "package generic.dockerfile", + LibraryInputData: "", + }, + }, + }, vb: DefaultVulnerabilityBuilder, tracker: &tracker.CITracker{}, enableCoverageReport: true, @@ -305,12 +301,12 @@ func TestInspect(t *testing.T) { //nolint currentQuery := make(chan int64) wg.Add(1) proBarBuilder := progress.InitializePbBuilder(true, true, true) - progressBar := proBarBuilder.BuildCounter("Executing queries: ", len(tt.fields.queries), wg, currentQuery) + progressBar := proBarBuilder.BuildCounter("Executing queries: ", len(tt.fields.queryLoader.QueriesMetadata), wg, currentQuery) go progressBar.Start() t.Run(tt.name, func(t *testing.T) { c := &Inspector{ - queries: tt.fields.queries, + QueryLoader: &tt.fields.queryLoader, vb: tt.fields.vb, tracker: tt.fields.tracker, enableCoverageReport: tt.fields.enableCoverageReport, @@ -320,7 +316,7 @@ func TestInspect(t *testing.T) { //nolint queryExecTimeout: time.Duration(60) * time.Second, } got, err := 
c.Inspect(tt.args.ctx, tt.args.scanID, tt.args.files, - []string{filepath.FromSlash("assets/queries/")}, []string{""}, currentQuery) + []string{filepath.FromSlash("assets/queries/")}, []string{"Dockerfile"}, currentQuery) if tt.wantErr { if err == nil { t.Errorf("Inspector.Inspect() = %v,\nwant %v", err, tt.want) @@ -361,44 +357,39 @@ func TestNewInspector(t *testing.T) { // nolint Types: []string{""}, } vbs := DefaultVulnerabilityBuilder - opaQueries := make([]*preparedQuery, 0, 1) - opaQueries = append(opaQueries, &preparedQuery{ - opaQuery: rego.PreparedEvalQuery{}, - metadata: model.QueryMetadata{ - Query: "all_auth_users_get_read_access", - Content: string(contentByte), - InputData: "{}", - Platform: "terraform", - Metadata: map[string]interface{}{ - "id": "57b9893d-33b1-4419-bcea-b828fb87e318", - "queryName": "All Auth Users Get Read Access", - "severity": model.SeverityHigh, - "category": "Access Control", - "descriptionText": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", // nolint - "descriptionUrl": "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket#acl", - "platform": "Terraform", - }, - Aggregation: 1, + opaQueries := make([]model.QueryMetadata, 0, 1) + opaQueries = append(opaQueries, model.QueryMetadata{ + Query: "all_auth_users_get_read_access", + Content: string(contentByte), + InputData: "{}", + Platform: "terraform", + Metadata: map[string]interface{}{ + "id": "57b9893d-33b1-4419-bcea-b828fb87e318", + "queryName": "All Auth Users Get Read Access", + "severity": model.SeverityHigh, + "category": "Access Control", + "descriptionText": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", // nolint + "descriptionUrl": "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket#acl", + "platform": "Terraform", }, + Aggregation: 1, }) - opaQueries = append(opaQueries, &preparedQuery{ - opaQuery: rego.PreparedEvalQuery{}, - metadata: model.QueryMetadata{ - Query: "common_query_test", - Content: string(contentByte2), - InputData: "{}", - Platform: "common", - Metadata: map[string]interface{}{ - "id": "4a3aa2b5-9c87-452c-a3ea-f3e9e3573874", - "queryName": "Common Query Test", - "severity": model.SeverityHigh, - "category": "Best Practices", - "descriptionText": "", - "descriptionUrl": "", - "platform": "Common", - }, - Aggregation: 1, + + opaQueries = append(opaQueries, model.QueryMetadata{ + Query: "common_query_test", + Content: string(contentByte2), + InputData: "{}", + Platform: "common", + Metadata: map[string]interface{}{ + "id": "4a3aa2b5-9c87-452c-a3ea-f3e9e3573874", + "queryName": "Common Query Test", + "severity": model.SeverityHigh, + "category": "Best Practices", + "descriptionText": "", + "descriptionUrl": "", + "platform": "Common", }, + Aggregation: 1, }) type args struct { ctx context.Context @@ -437,7 +428,9 @@ func TestNewInspector(t *testing.T) { // nolint want: &Inspector{ vb: vbs, tracker: track, - queries: opaQueries, + QueryLoader: &QueryLoader{ + QueriesMetadata: opaQueries, + }, }, wantErr: false, }, @@ -457,14 +450,14 @@ func TestNewInspector(t *testing.T) { // nolint return } - require.Equal(t, len(tt.want.queries), len(got.queries)) + require.Equal(t, len(tt.want.QueryLoader.QueriesMetadata), len(got.QueryLoader.QueriesMetadata)) - for idx := 0; idx < len(tt.want.queries); idx++ { - gotStrMetadata, err := 
test.StringifyStruct(got.queries[idx].metadata) + for idx := 0; idx < len(tt.want.QueryLoader.QueriesMetadata); idx++ { + gotStrMetadata, err := test.StringifyStruct(got.QueryLoader.QueriesMetadata[idx].Metadata) require.Nil(t, err) - wantStrMetadata, err := test.StringifyStruct(tt.want.queries[idx].metadata) + wantStrMetadata, err := test.StringifyStruct(tt.want.QueryLoader.QueriesMetadata[idx].Metadata) require.Nil(t, err) - if !reflect.DeepEqual(got.queries[idx].metadata, tt.want.queries[idx].metadata) { + if !reflect.DeepEqual(got.QueryLoader.QueriesMetadata[idx].Metadata, tt.want.QueryLoader.QueriesMetadata[idx].Metadata) { t.Errorf("NewInspector() metadata: got = %v,\n want = %v", gotStrMetadata, wantStrMetadata) } } @@ -758,3 +751,76 @@ func TestInspector_checkComment(t *testing.T) { }) } } + +func TestInspector_prepareQueries(t *testing.T) { + type args struct { + queries []model.QueryMetadata + commonLibrary source.RegoLibraries + platformLibraries map[string]source.RegoLibraries + tracker Tracker + } + + tests := []struct { + name string + args args + want QueryLoader + }{ + { + name: "test_prepareQueries", + args: args{ + queries: []model.QueryMetadata{ + { + Metadata: map[string]interface{}{ + "id": "ffdf4b37-7703-4dfe-a682-9d2e99bc6c09", + "aggregation": 3, + }, + Query: `package main`, + Aggregation: 3, + }, + }, + commonLibrary: source.RegoLibraries{ + LibraryCode: "", + LibraryInputData: "{}", + }, + platformLibraries: map[string]source.RegoLibraries{ + "Dockerfile": { + LibraryCode: "", + LibraryInputData: "{}", + }, + }, + tracker: &tracker.CITracker{}, + }, + want: QueryLoader{ + QueriesMetadata: []model.QueryMetadata{ + { + Metadata: map[string]interface{}{ + "id": "ffdf4b37-7703-4dfe-a682-9d2e99bc6c09", + "aggregation": 3, + }, + Query: `package main`, + Aggregation: 3, + }, + }, + querySum: 3, + commonLibrary: source.RegoLibraries{ + LibraryCode: "", + LibraryInputData: "{}", + }, + platformLibraries: map[string]source.RegoLibraries{ + "Dockerfile": { + LibraryCode: "", + LibraryInputData: "{}", + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := prepareQueries(tt.args.queries, tt.args.commonLibrary, tt.args.platformLibraries, tt.args.tracker); !reflect.DeepEqual(got, tt.want) { + t.Errorf("prepareQueries() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/kics/service.go b/pkg/kics/service.go index fd868bf7406..4370359b4fe 100644 --- a/pkg/kics/service.go +++ b/pkg/kics/service.go @@ -58,6 +58,7 @@ type Service struct { files model.FileMetadatas } +// PrepareSources will prepare the sources to be scanned func (s *Service) PrepareSources(ctx context.Context, scanID string, wg *sync.WaitGroup, errCh chan<- error) { defer wg.Done() // CxSAST query under review diff --git a/pkg/kics/service_test.go b/pkg/kics/service_test.go index 134b292ce90..43cc445fbab 100644 --- a/pkg/kics/service_test.go +++ b/pkg/kics/service_test.go @@ -53,7 +53,11 @@ func TestService(t *testing.T) { //nolint { name: "service", fields: fields{ - Inspector: &engine.Inspector{}, + Inspector: &engine.Inspector{ + QueryLoader: &engine.QueryLoader{ + QueriesMetadata: make([]model.QueryMetadata, 0), + }, + }, SecretsInspector: &secrets.Inspector{}, Parser: mockParser, Tracker: &tracker.CITracker{}, diff --git a/pkg/model/model.go b/pkg/model/model.go index 5cade57f2fb..1c469db0e84 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -201,8 +201,8 @@ func (e Extensions) Include(ext string) bool { // LineObject is the struct that 
will hold line information for each key type LineObject struct { - Line int `json:"_kics_line"` - Arr []map[string]LineObject `json:"_kics_arr,omitempty"` + Line int `json:"_kics_line"` + Arr []map[string]*LineObject `json:"_kics_arr,omitempty"` } // MatchedFilesRegex returns the regex rule to identify if an extension is supported or not diff --git a/pkg/model/model_yaml.go b/pkg/model/model_yaml.go index 605360cc90f..b60f7cf31ec 100644 --- a/pkg/model/model_yaml.go +++ b/pkg/model/model_yaml.go @@ -88,13 +88,13 @@ func unmarshal(val *yaml.Node) interface{} { // getLines creates the map containing the line information for the yaml Node // def is the line to be used as "_kics__default" -func getLines(val *yaml.Node, def int) map[string]LineObject { - lineMap := make(map[string]LineObject) +func getLines(val *yaml.Node, def int) map[string]*LineObject { + lineMap := make(map[string]*LineObject) // line information map - lineMap["_kics__default"] = LineObject{ + lineMap["_kics__default"] = &LineObject{ Line: def, - Arr: []map[string]LineObject{}, + Arr: []map[string]*LineObject{}, } // if yaml Node is an Array use func getSeqLines @@ -104,7 +104,7 @@ func getLines(val *yaml.Node, def int) map[string]LineObject { // iterate two by two, since first iteration is the key and the second is the value for i := 0; i < len(val.Content); i += 2 { - lineArr := make([]map[string]LineObject, 0) + lineArr := make([]map[string]*LineObject, 0) // in case the value iteration is an array call getLines for each iteration of the array if val.Content[i+1].Kind == yaml.SequenceNode { for _, contentEntry := range val.Content[i+1].Content { @@ -119,7 +119,7 @@ func getLines(val *yaml.Node, def int) map[string]LineObject { } // line information map of each key of the yaml Node - lineMap["_kics_"+val.Content[i].Value] = LineObject{ + lineMap["_kics_"+val.Content[i].Value] = &LineObject{ Line: val.Content[i].Line, Arr: lineArr, } @@ -130,9 +130,9 @@ func getLines(val *yaml.Node, def int) map[string]LineObject { // getSeqLines iterates through the elements of an Array // creating a map with each iteration lines information -func getSeqLines(val *yaml.Node, def int) map[string]LineObject { - lineMap := make(map[string]LineObject) - lineArr := make([]map[string]LineObject, 0) +func getSeqLines(val *yaml.Node, def int) map[string]*LineObject { + lineMap := make(map[string]*LineObject) + lineArr := make([]map[string]*LineObject, 0) // get line information slice of every element in the array for _, cont := range val.Content { @@ -140,7 +140,7 @@ func getSeqLines(val *yaml.Node, def int) map[string]LineObject { } // create line information of array with its line and elements line information - lineMap["_kics__default"] = LineObject{ + lineMap["_kics__default"] = &LineObject{ Line: def, Arr: lineArr, } diff --git a/pkg/parser/grpc/converter/converter.go b/pkg/parser/grpc/converter/converter.go index 9670b1bef66..1c85f6d649b 100644 --- a/pkg/parser/grpc/converter/converter.go +++ b/pkg/parser/grpc/converter/converter.go @@ -148,7 +148,7 @@ func Convert(nodes *proto.Proto) (file *JSONProto, linesIgnore []int) { serviceLines := make(map[string]model.LineObject) importLines := make(map[string]model.LineObject) - defaultArr := make([]map[string]model.LineObject, 0) + defaultArr := make([]map[string]*model.LineObject, 0) for _, elem := range nodes.Elements { switch element := elem.(type) { @@ -157,14 +157,14 @@ func Convert(nodes *proto.Proto) (file *JSONProto, linesIgnore []int) { jproto.Messages[element.Name] = 
jproto.convertMessage(element) messageLines["_kics_"+element.Name] = model.LineObject{ Line: element.Position.Line, - Arr: make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } case *proto.Service: jproto.processCommentProto(element.Comment, element.Position.Line, element) jproto.convertService(element) serviceLines["_kics_"+element.Name] = model.LineObject{ Line: element.Position.Line, - Arr: make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } case *proto.Package: jproto.processCommentProto(element.Comment, element.Position.Line, element) @@ -179,12 +179,12 @@ func Convert(nodes *proto.Proto) (file *JSONProto, linesIgnore []int) { } importLines["_kics_"+element.Filename] = model.LineObject{ Line: element.Position.Line, - Arr: make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } case *proto.Option: jproto.processCommentProto(element.Comment, element.Position.Line, element) jproto.Options = append(jproto.Options, jproto.convertSingleOption(element)) - defaultArr = append(defaultArr, map[string]model.LineObject{ + defaultArr = append(defaultArr, map[string]*model.LineObject{ element.Name: { Line: element.Position.Line, }, @@ -194,7 +194,7 @@ func Convert(nodes *proto.Proto) (file *JSONProto, linesIgnore []int) { jproto.Enum[element.Name] = jproto.convertEnum(element) enumLines["_kics_"+element.Name] = model.LineObject{ Line: element.Position.Line, - Arr: make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } case *proto.Syntax: jproto.processCommentProto(element.Comment, element.Position.Line, element) @@ -232,7 +232,7 @@ func (j *JSONProto) convertMessage(n *proto.Message) Message { Lines: make(map[string]model.LineObject), } - defaultArr := make([]map[string]model.LineObject, 0) + defaultArr := make([]map[string]*model.LineObject, 0) for _, field := range n.Elements { switch field := field.(type) { @@ -254,7 +254,7 @@ func (j *JSONProto) convertMessage(n *proto.Message) Message { case *proto.Reserved: j.processCommentProto(field.Comment, field.Position.Line, field) message.Reserved = append(message.Reserved, j.convertReserved(field)) - defaultArr = append(defaultArr, map[string]model.LineObject{ + defaultArr = append(defaultArr, map[string]*model.LineObject{ "Reserved": { Line: field.Position.Line, }, @@ -319,7 +319,7 @@ func (j *JSONProto) convertEnum(n *proto.Enum) Enum { Lines: make(map[string]model.LineObject), } - defaultArr := make([]map[string]model.LineObject, 0) + defaultArr := make([]map[string]*model.LineObject, 0) for _, elem := range n.Elements { switch elem := elem.(type) { @@ -338,7 +338,7 @@ func (j *JSONProto) convertEnum(n *proto.Enum) Enum { case *proto.Reserved: j.processCommentProto(elem.Comment, elem.Position.Line, elem) enum.Reserved = append(enum.Reserved, j.convertReserved(elem)) - defaultArr = append(defaultArr, map[string]model.LineObject{ + defaultArr = append(defaultArr, map[string]*model.LineObject{ "Reserved": { Line: elem.Position.Line, }, @@ -370,7 +370,7 @@ func (j *JSONProto) convertOneOf(n *proto.Oneof) OneOf { } oneof.Lines["_kics__default"] = model.LineObject{ Line: n.Position.Line, - Arr: make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } for _, elem := range n.Elements { switch elem := elem.(type) { @@ -420,7 +420,7 @@ func (j *JSONProto) convertService(n *proto.Service) { service.Lines["_kics__default"] = model.LineObject{ Line: n.Position.Line, - Arr: 
make([]map[string]model.LineObject, 0), + Arr: make([]map[string]*model.LineObject, 0), } for _, rpc := range n.Elements { diff --git a/pkg/parser/json/json_line.go b/pkg/parser/json/json_line.go index b191b950cb2..19236c16d6c 100644 --- a/pkg/parser/json/json_line.go +++ b/pkg/parser/json/json_line.go @@ -183,12 +183,12 @@ func (j *jsonLine) setLineInfo(doc map[string]interface{}) map[string]interface{ // def is the line of the key // index is used in case of an array, otherwhise should be 0 // father is the path to the key -func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, pop bool) map[string]model.LineObject { - lineMap := make(map[string]model.LineObject) +func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, pop bool) map[string]*model.LineObject { + lineMap := make(map[string]*model.LineObject) // set the line information of val - lineMap["_kics__default"] = model.LineObject{ + lineMap["_kics__default"] = &model.LineObject{ Line: def, - Arr: []map[string]model.LineObject{}, + Arr: []map[string]*model.LineObject{}, } // iterate through the values of the object @@ -204,7 +204,7 @@ func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, p continue } - lineArr := make([]map[string]model.LineObject, 0) + lineArr := make([]map[string]*model.LineObject, 0) lineNr := line.(*fifo).head() if pop { lineNr = line.(*fifo).pop() @@ -216,10 +216,10 @@ func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, p lineArr = j.setSeqLines(v, lineNr, father, key, lineArr) // value is an object and must setLines for each element of the object case map[string]interface{}: - v["_kics_lines"] = j.setLine(v, lineNr, father+"."+key, false) + v["_kics_lines"] = j.setLine(v, lineNr, fmt.Sprintf("%s.%s", father, key), false) default: // value as no childs - lineMap["_kics_"+key] = model.LineObject{ + lineMap[fmt.Sprintf("_kics_%s", key)] = &model.LineObject{ Line: lineNr, Arr: lineArr, } @@ -228,7 +228,7 @@ func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, p // set line information of value with its default line and // if present array elements line informations - lineMap["_kics_"+key] = model.LineObject{ + lineMap[fmt.Sprintf("_kics_%s", key)] = &model.LineObject{ Line: lineNr, Arr: lineArr, } @@ -238,7 +238,7 @@ func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, p // setSeqLines sets the elements lines information for value of type array func (j *jsonLine) setSeqLines(v []interface{}, def int, father, key string, - lineArr []map[string]model.LineObject) []map[string]model.LineObject { + lineArr []map[string]*model.LineObject) []map[string]*model.LineObject { // update father path with key fatherKey := father + "." 
+ key @@ -256,11 +256,11 @@ func (j *jsonLine) setSeqLines(v []interface{}, def int, father, key string, default: stringedCon := fmt.Sprint(con) // check if element is present in line info map - if lineStr, ok2 := j.LineInfo[stringedCon][father+"."+key]; ok2 { + if lineStr, ok2 := j.LineInfo[stringedCon][fmt.Sprintf("%s.%s", father, key)]; ok2 { if len(lineStr.(*fifo).Value) == 0 { continue } - lineArr = append(lineArr, map[string]model.LineObject{ + lineArr = append(lineArr, map[string]*model.LineObject{ "_kics__default": { Line: lineStr.(*fifo).pop(), }, diff --git a/pkg/parser/json/parser.go b/pkg/parser/json/parser.go index f16014d0ec9..f884a350caa 100644 --- a/pkg/parser/json/parser.go +++ b/pkg/parser/json/parser.go @@ -5,6 +5,7 @@ import ( "encoding/json" "github.com/Checkmarx/kics/pkg/model" + "github.com/mailru/easyjson" ) // Parser defines a parser type @@ -20,7 +21,7 @@ func (p *Parser) Resolve(fileContent []byte, filename string) (*[]byte, error) { // Parse parses json file and returns it as a Document func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) { r := model.Document{} - err := json.Unmarshal(fileContent, &r) + err := easyjson.Unmarshal(fileContent, &r) if err != nil { r := []model.Document{} err = json.Unmarshal(fileContent, &r) diff --git a/pkg/parser/terraform/converter/default.go b/pkg/parser/terraform/converter/default.go index cdacd03aa86..e5be189e012 100644 --- a/pkg/parser/terraform/converter/default.go +++ b/pkg/parser/terraform/converter/default.go @@ -133,18 +133,18 @@ func (c *converter) convertBody(body *hclsyntax.Body, defLine int) (model.Docume } // getArrLines will get line information for the array elements -func (c *converter) getArrLines(expr hclsyntax.Expression) []map[string]model.LineObject { - arr := make([]map[string]model.LineObject, 0) +func (c *converter) getArrLines(expr hclsyntax.Expression) []map[string]*model.LineObject { + arr := make([]map[string]*model.LineObject, 0) if v, ok := expr.(*hclsyntax.TupleConsExpr); ok { for _, ex := range v.Exprs { - arrEx := make(map[string]model.LineObject) + arrEx := make(map[string]*model.LineObject) // set default line of array - arrEx["_kics__default"] = model.LineObject{ + arrEx["_kics__default"] = &model.LineObject{ Line: ex.Range().Start.Line, } switch valType := ex.(type) { case *hclsyntax.ObjectConsExpr: - arrEx["_kics__default"] = model.LineObject{ + arrEx["_kics__default"] = &model.LineObject{ Line: ex.Range().Start.Line + 1, } // set lines for array elements @@ -159,13 +159,13 @@ func (c *converter) getArrLines(expr hclsyntax.Expression) []map[string]model.Li }, false) return nil } - arrEx["_kics_"+key] = model.LineObject{ + arrEx["_kics_"+key] = &model.LineObject{ Line: item.KeyExpr.Range().Start.Line, } } case *hclsyntax.TupleConsExpr: // set lines for array elements if type is different than array, map/object - arrEx["_kics__default"] = model.LineObject{ + arrEx["_kics__default"] = &model.LineObject{ Arr: c.getArrLines(valType), } } diff --git a/pkg/progress/circle/circle_progress.go b/pkg/progress/circle/circle_progress.go index 9b0a0b520c5..182bdee1af7 100644 --- a/pkg/progress/circle/circle_progress.go +++ b/pkg/progress/circle/circle_progress.go @@ -3,13 +3,15 @@ package circle import ( "fmt" "io" + "time" "github.com/Checkmarx/kics/internal/constants" "github.com/cheggaaa/pb/v3" ) const ( - barWidth = 0 + barWidth = 0 + sleepTimeout = 100 ) // ProgressBar is a struct that holds the required feilds for @@ -45,6 +47,8 @@ func NewProgressBar(label string, 
silent bool) ProgressBar { func (p ProgressBar) Start() { for { // increment until the Close func is called p.pBar.Increment() + // lower CPU usage from the infinite loop + time.Sleep(time.Millisecond * sleepTimeout) } }
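
The central change in this patch is that the Inspector no longer prepares every Rego query in NewInspector: prepareQueries now only records the query metadata, the common library, the per-platform libraries, and the aggregated query count in a QueryLoader, while Inspect compiles each query on demand through QueryLoader.loadQuery and skips queries that fail to load. Because the prepared query is a loop-local value, it becomes eligible for garbage collection as soon as its iteration finishes, which presumably trades a little compile time per query for a much smaller resident set during large scans. The following is a minimal, self-contained sketch of that lazy-compilation pattern; the queryMeta type, the evalOne helper, and the inline policy are illustrative stand-ins, not code from this repository.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/open-policy-agent/opa/rego"
)

// queryMeta mirrors the idea behind model.QueryMetadata: only the query
// source and its metadata stay resident, never a compiled query.
type queryMeta struct {
	Name    string
	Content string
}

// evalOne prepares a single query, evaluates it, and then lets the prepared
// query go out of scope so it can be garbage-collected.
func evalOne(ctx context.Context, q queryMeta, input interface{}) (rego.ResultSet, error) {
	prepared, err := rego.New(
		rego.Query("data.example.deny"),
		rego.Module(q.Name, q.Content),
	).PrepareForEval(ctx)
	if err != nil {
		return nil, fmt.Errorf("prepare %s: %w", q.Name, err)
	}
	return prepared.Eval(ctx, rego.EvalInput(input))
}

func main() {
	queries := []queryMeta{
		{
			Name: "example",
			Content: `package example

deny[msg] {
	input.image_tag == "latest"
	msg := "image should be pinned to a version or digest"
}`,
		},
	}

	ctx := context.Background()
	input := map[string]interface{}{"image_tag": "latest"}

	for _, q := range queries {
		rs, err := evalOne(ctx, q, input)
		if err != nil {
			// the patch skips queries that fail to load instead of aborting the scan
			log.Printf("skipping %s: %v", q.Name, err)
			continue
		}
		fmt.Printf("%s: %d result(s)\n", q.Name, len(rs))
	}
}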