diff --git a/docs/cursor.md b/docs/cursor.md
new file mode 100644
index 00000000..0a63b386
--- /dev/null
+++ b/docs/cursor.md
@@ -0,0 +1,85 @@
+# Cursor
+
+The cursor is a list of `Field`, where `Field` is defined as:
+
+```go
+type Field struct {
+    Name  DbColumnName
+    Value any
+    Order OrderDirection
+}
+```
+
+This list is encoded as a base64 string.
+
+Each entity defines its own cursor values. For example, `IssueMatch` allows the following cursor fields:
+
+- IssueMatchId
+- IssueMatchTargetRemediationDate
+- IssueMatchRating
+- ComponentInstanceCCRN
+- IssuePrimaryName
+
+```go
+func WithIssueMatch(order []Order, im IssueMatch) NewCursor {
+
+    return func(cursors *cursors) error {
+        cursors.fields = append(cursors.fields, Field{Name: IssueMatchId, Value: im.Id, Order: OrderDirectionAsc})
+        cursors.fields = append(cursors.fields, Field{Name: IssueMatchTargetRemediationDate, Value: im.TargetRemediationDate, Order: OrderDirectionAsc})
+        cursors.fields = append(cursors.fields, Field{Name: IssueMatchRating, Value: im.Severity.Value, Order: OrderDirectionAsc})
+
+        if im.ComponentInstance != nil {
+            cursors.fields = append(cursors.fields, Field{Name: ComponentInstanceCcrn, Value: im.ComponentInstance.CCRN, Order: OrderDirectionAsc})
+        }
+        if im.Issue != nil {
+            cursors.fields = append(cursors.fields, Field{Name: IssuePrimaryName, Value: im.Issue.PrimaryName, Order: OrderDirectionAsc})
+        }
+
+        // apply any direction overrides from the order list
+        m := CreateOrderMap(order)
+        for i, f := range cursors.fields {
+            if orderDirection, ok := m[f.Name]; ok {
+                cursors.fields[i].Order = orderDirection
+            }
+        }
+
+        return nil
+    }
+}
+```
+
+The cursor is returned by the database layer and can be encoded as follows:
+
+```go
+    cursor, _ := entity.EncodeCursor(entity.WithIssueMatch(order, im))
+```
+
+An order list can be passed to override the default ordering.
+
+## Cursor Query
+
+The cursor points to the starting point in a list of database rows. All elements *after* the cursor are returned.
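+
+To build this query, the database layer first decodes the cursor back into its field list (see `DecodeCursor` in `internal/database/mariadb/cursor.go`). The following is a minimal, self-contained sketch of the encode/decode round trip, in which the field list is serialized to JSON and then base64-encoded. The simplified `Field` stand-in and the literal values are illustrative only:
+
+```go
+package main
+
+import (
+    "encoding/base64"
+    "encoding/json"
+    "fmt"
+)
+
+// Field is a simplified, illustrative stand-in for the entity Field shown above.
+type Field struct {
+    Name  string
+    Value any
+    Order string
+}
+
+func main() {
+    // A two-field cursor: the primary sort key plus the row id as tie-breaker.
+    // Column names follow DbColumnNameMap; the values are made up.
+    fields := []Field{
+        {Name: "issue_primary_name", Value: "CVE-2024-1234", Order: "asc"},
+        {Name: "issuematch_id", Value: int64(42), Order: "asc"},
+    }
+
+    // Encode: serialize the field list to JSON, then base64-encode it.
+    raw, _ := json.Marshal(fields)
+    cursor := base64.StdEncoding.EncodeToString(raw)
+    fmt.Println(cursor)
+
+    // Decode: reverse both steps to recover the fields that drive the
+    // WHERE clause shown below.
+    decoded, _ := base64.StdEncoding.DecodeString(cursor)
+    var out []Field
+    _ = json.Unmarshal(decoded, &out)
+    fmt.Printf("%+v\n", out)
+}
+```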
+Depending on the order direction, the query looks like:
+
+```sql
+-- ascending order
+WHERE id > cursor_id
+
+-- descending order
+WHERE id < cursor_id
+```
+
+If the cursor contains two fields, the query also needs to compare the second field whenever the first field is equal:
+
+```sql
+WHERE (id = cursor_id AND primaryName > cursor_primaryName) OR (id > cursor_id)
+```
+
+Similarly, for three fields:
+
+```sql
+WHERE
+    (id = cursor_id AND primaryName = cursor_primaryName AND trd > cursor_trd) OR
+    (id = cursor_id AND primaryName > cursor_primaryName) OR
+    (id > cursor_id)
+```
diff --git a/docs/ordering.md b/docs/ordering.md
new file mode 100644
index 00000000..4b76c2bb
--- /dev/null
+++ b/docs/ordering.md
@@ -0,0 +1,105 @@
+# Order
+
+## API
+
+The query contains an additional `orderBy` argument:
+
+```graphql
+IssueMatches(filter: IssueMatchFilter, first: Int, after: String, orderBy: [IssueMatchOrderBy]): IssueMatchConnection
+```
+
+The `OrderBy` input is defined for each model:
+
+```graphql
+input IssueMatchOrderBy {
+    by: IssueMatchOrderByField
+    direction: OrderDirection
+}
+```
+
+The `by` field uses an enum that defines the allowed order options:
+
+```graphql
+enum IssueMatchOrderByField {
+    primaryName
+    targetRemediationDate
+    componentInstanceCcrn
+}
+```
+
+The `OrderDirection` values are defined in `common.graphqls`:
+
+```graphql
+enum OrderDirection {
+    asc
+    desc
+}
+```
+
+The generated order models are converted to the entity order model in `api/graph/model/models.go`:
+
+```go
+func (imo *IssueMatchOrderBy) ToOrderEntity() entity.Order {
+    var order entity.Order
+    switch *imo.By {
+    case IssueMatchOrderByFieldPrimaryName:
+        order.By = entity.IssuePrimaryName
+    case IssueMatchOrderByFieldComponentInstanceCcrn:
+        order.By = entity.ComponentInstanceCcrn
+    case IssueMatchOrderByFieldTargetRemediationDate:
+        order.By = entity.IssueMatchTargetRemediationDate
+    }
+    order.Direction = imo.Direction.ToOrderDirectionEntity()
+    return order
+}
+```
+
+## Entity
+
+```go
+type Order struct {
+    By        DbColumnName
+    Direction OrderDirection
+}
+```
+
+The `By` field is a `DbColumnName` constant; the mapping to the actual database column names is defined as:
+
+```go
+var DbColumnNameMap = map[DbColumnName]string{
+    ComponentInstanceCcrn:           "componentinstance_ccrn",
+    IssuePrimaryName:                "issue_primary_name",
+    IssueMatchId:                    "issuematch_id",
+    IssueMatchRating:                "issuematch_rating",
+    IssueMatchTargetRemediationDate: "issuematch_target_remediation_date",
+    SupportGroupName:                "supportgroup_name",
+}
+```
+
+## Database
+
+The `GetIssueMatches()` function has an additional order argument:
+
+```go
+func (s *SqlDatabase) GetIssueMatches(filter *entity.IssueMatchFilter, order []entity.Order) ([]entity.IssueMatchResult, error) {
+    ...
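+    // the order argument is rendered into the ORDER BY clause (see
+    // CreateOrderString below) and combined with the cursor-derived
+    // WHERE conditions in buildIssueMatchStatement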
+} +``` + +The order string is created by in `entity/order.go`: + +```go +func CreateOrderString(order []Order) string { + orderStr := "" + for i, o := range order { + if i > 0 { + orderStr = fmt.Sprintf("%s, %s %s", orderStr, o.By, o.Direction) + } else { + orderStr = fmt.Sprintf("%s %s %s", orderStr, o.By, o.Direction) + } + } + return orderStr +} +``` + + diff --git a/internal/api/graphql/graph/baseResolver/common.go b/internal/api/graphql/graph/baseResolver/common.go index c76db9d1..b5fdc47b 100644 --- a/internal/api/graphql/graph/baseResolver/common.go +++ b/internal/api/graphql/graph/baseResolver/common.go @@ -103,5 +103,6 @@ func GetListOptions(requestedFields []string) *entity.ListOptions { ShowTotalCount: lo.Contains(requestedFields, "totalCount"), ShowPageInfo: lo.Contains(requestedFields, "pageInfo"), IncludeAggregations: lo.Contains(requestedFields, "edges.node.objectMetadata"), + Order: []entity.Order{}, } } diff --git a/internal/api/graphql/graph/baseResolver/issue_match.go b/internal/api/graphql/graph/baseResolver/issue_match.go index 3814e9de..23310f1f 100644 --- a/internal/api/graphql/graph/baseResolver/issue_match.go +++ b/internal/api/graphql/graph/baseResolver/issue_match.go @@ -54,19 +54,13 @@ func SingleIssueMatchBaseResolver(app app.Heureka, ctx context.Context, parent * return &issueMatch, nil } -func IssueMatchBaseResolver(app app.Heureka, ctx context.Context, filter *model.IssueMatchFilter, first *int, after *string, parent *model.NodeParent) (*model.IssueMatchConnection, error) { +func IssueMatchBaseResolver(app app.Heureka, ctx context.Context, filter *model.IssueMatchFilter, first *int, after *string, orderBy []*model.IssueMatchOrderBy, parent *model.NodeParent) (*model.IssueMatchConnection, error) { requestedFields := GetPreloads(ctx) logrus.WithFields(logrus.Fields{ "requestedFields": requestedFields, "parent": parent, }).Debug("Called IssueMatchBaseResolver") - afterId, err := ParseCursor(after) - if err != nil { - logrus.WithField("after", after).Error("IssueMatchBaseResolver: Error while parsing parameter 'after'") - return nil, NewResolverError("IssueMatchBaseResolver", "Bad Request - unable to parse cursor 'after'") - } - var eId []*int64 var ciId []*int64 var issueId []*int64 @@ -104,7 +98,7 @@ func IssueMatchBaseResolver(app app.Heureka, ctx context.Context, filter *model. f := &entity.IssueMatchFilter{ Id: issue_match_ids, - Paginated: entity.Paginated{First: first, After: afterId}, + PaginatedX: entity.PaginatedX{First: first, After: after}, AffectedServiceCCRN: filter.AffectedService, Status: lo.Map(filter.Status, func(item *model.IssueMatchStatusValues, _ int) *string { return pointer.String(item.String()) }), SeverityValue: lo.Map(filter.Severity, func(item *model.SeverityValues, _ int) *string { return pointer.String(item.String()) }), @@ -120,6 +114,9 @@ func IssueMatchBaseResolver(app app.Heureka, ctx context.Context, filter *model. 
} opt := GetListOptions(requestedFields) + for _, o := range orderBy { + opt.Order = append(opt.Order, o.ToOrderEntity()) + } issueMatches, err := app.ListIssueMatches(f, opt) diff --git a/internal/api/graphql/graph/model/models.go b/internal/api/graphql/graph/model/models.go index b11c614a..68c571aa 100644 --- a/internal/api/graphql/graph/model/models.go +++ b/internal/api/graphql/graph/model/models.go @@ -49,6 +49,32 @@ var AllIssueMatchStatusValuesOrdered = []IssueMatchStatusValues{ IssueMatchStatusValuesMitigated, } +type HasToEntity interface { + ToOrderEntity() entity.Order +} + +func (od *OrderDirection) ToOrderDirectionEntity() entity.OrderDirection { + direction := entity.OrderDirectionAsc + if *od == OrderDirectionDesc { + direction = entity.OrderDirectionDesc + } + return direction +} + +func (imo *IssueMatchOrderBy) ToOrderEntity() entity.Order { + var order entity.Order + switch *imo.By { + case IssueMatchOrderByFieldPrimaryName: + order.By = entity.IssuePrimaryName + case IssueMatchOrderByFieldComponentInstanceCcrn: + order.By = entity.ComponentInstanceCcrn + case IssueMatchOrderByFieldTargetRemediationDate: + order.By = entity.IssueMatchTargetRemediationDate + } + order.Direction = imo.Direction.ToOrderDirectionEntity() + return order +} + func NewPageInfo(p *entity.PageInfo) *PageInfo { if p == nil { return nil diff --git a/internal/api/graphql/graph/queryCollection/issueMatch/withOrder.graphql b/internal/api/graphql/graph/queryCollection/issueMatch/withOrder.graphql new file mode 100644 index 00000000..90ca7c99 --- /dev/null +++ b/internal/api/graphql/graph/queryCollection/issueMatch/withOrder.graphql @@ -0,0 +1,47 @@ +# SPDX-FileCopyrightText: 2024 SAP SE or an SAP affiliate company and Greenhouse contributors +# SPDX-License-Identifier: Apache-2.0 + +query ($filter: IssueMatchFilter, $first: Int, $after: String, $orderBy: [IssueMatchOrderBy]) { + IssueMatches ( + filter: $filter, + first: $first, + after: $after + orderBy: $orderBy + ) { + totalCount + edges { + node { + id + targetRemediationDate + severity { + value + score + } + issueId + issue { + id + primaryName + } + componentInstanceId + componentInstance { + id + ccrn + } + } + cursor + } + pageInfo { + hasNextPage + hasPreviousPage + isValidPage + pageNumber + nextPageAfter + pages { + after + isCurrent + pageNumber + pageCount + } + } + } +} diff --git a/internal/api/graphql/graph/resolver/component_instance.go b/internal/api/graphql/graph/resolver/component_instance.go index 9dc4d4ef..77997258 100644 --- a/internal/api/graphql/graph/resolver/component_instance.go +++ b/internal/api/graphql/graph/resolver/component_instance.go @@ -33,7 +33,7 @@ func (r *componentInstanceResolver) ComponentVersion(ctx context.Context, obj *m } func (r *componentInstanceResolver) IssueMatches(ctx context.Context, obj *model.ComponentInstance, filter *model.IssueMatchFilter, first *int, after *string) (*model.IssueMatchConnection, error) { - return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, &model.NodeParent{ + return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, nil, &model.NodeParent{ Parent: obj, ParentName: model.ComponentInstanceNodeName, }) diff --git a/internal/api/graphql/graph/resolver/evidence.go b/internal/api/graphql/graph/resolver/evidence.go index 76175a33..268e20d6 100644 --- a/internal/api/graphql/graph/resolver/evidence.go +++ b/internal/api/graphql/graph/resolver/evidence.go @@ -48,7 +48,7 @@ func (r *evidenceResolver) Activity(ctx context.Context, obj 
*model.Evidence) (* } func (r *evidenceResolver) IssueMatches(ctx context.Context, obj *model.Evidence, filter *model.IssueMatchFilter, first *int, after *string) (*model.IssueMatchConnection, error) { - return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, &model.NodeParent{ + return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, nil, &model.NodeParent{ Parent: obj, ParentName: model.EvidenceNodeName, }) diff --git a/internal/api/graphql/graph/resolver/issue.go b/internal/api/graphql/graph/resolver/issue.go index 36b06d40..1657474a 100644 --- a/internal/api/graphql/graph/resolver/issue.go +++ b/internal/api/graphql/graph/resolver/issue.go @@ -30,7 +30,7 @@ func (r *issueResolver) Activities(ctx context.Context, obj *model.Issue, filter } func (r *issueResolver) IssueMatches(ctx context.Context, obj *model.Issue, filter *model.IssueMatchFilter, first *int, after *string) (*model.IssueMatchConnection, error) { - return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, &model.NodeParent{ + return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, nil, &model.NodeParent{ Parent: obj, ParentName: model.IssueNodeName, }) diff --git a/internal/api/graphql/graph/resolver/query.go b/internal/api/graphql/graph/resolver/query.go index 7fb5d14d..78785c4f 100644 --- a/internal/api/graphql/graph/resolver/query.go +++ b/internal/api/graphql/graph/resolver/query.go @@ -21,8 +21,8 @@ func (r *queryResolver) Issues(ctx context.Context, filter *model.IssueFilter, f return baseResolver.IssueBaseResolver(r.App, ctx, filter, first, after, nil) } -func (r *queryResolver) IssueMatches(ctx context.Context, filter *model.IssueMatchFilter, first *int, after *string) (*model.IssueMatchConnection, error) { - return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, nil) +func (r *queryResolver) IssueMatches(ctx context.Context, filter *model.IssueMatchFilter, first *int, after *string, orderBy []*model.IssueMatchOrderBy) (*model.IssueMatchConnection, error) { + return baseResolver.IssueMatchBaseResolver(r.App, ctx, filter, first, after, orderBy, nil) } func (r *queryResolver) IssueMatchChanges(ctx context.Context, filter *model.IssueMatchChangeFilter, first *int, after *string) (*model.IssueMatchChangeConnection, error) { diff --git a/internal/api/graphql/graph/schema/common.graphqls b/internal/api/graphql/graph/schema/common.graphqls index 2e165054..17cbefbc 100644 --- a/internal/api/graphql/graph/schema/common.graphqls +++ b/internal/api/graphql/graph/schema/common.graphqls @@ -115,6 +115,11 @@ type Metadata { updated_by: String } +enum OrderDirection { + asc + desc +} + enum StateFilter { Active, Deleted diff --git a/internal/api/graphql/graph/schema/issue_match.graphqls b/internal/api/graphql/graph/schema/issue_match.graphqls index 69af56e5..f6dc3b42 100644 --- a/internal/api/graphql/graph/schema/issue_match.graphqls +++ b/internal/api/graphql/graph/schema/issue_match.graphqls @@ -66,3 +66,14 @@ enum IssueMatchStatusValues { false_positive mitigated } + +input IssueMatchOrderBy { + by: IssueMatchOrderByField + direction: OrderDirection +} + +enum IssueMatchOrderByField { + primaryName + targetRemediationDate + componentInstanceCcrn +} diff --git a/internal/api/graphql/graph/schema/query.graphqls b/internal/api/graphql/graph/schema/query.graphqls index 334c1316..e63cdc12 100644 --- a/internal/api/graphql/graph/schema/query.graphqls +++ b/internal/api/graphql/graph/schema/query.graphqls @@ -3,7 +3,7 @@ type Query 
{ Issues(filter: IssueFilter, first: Int, after: String): IssueConnection - IssueMatches(filter: IssueMatchFilter, first: Int, after: String): IssueMatchConnection + IssueMatches(filter: IssueMatchFilter, first: Int, after: String, orderBy: [IssueMatchOrderBy]): IssueMatchConnection IssueMatchChanges(filter: IssueMatchChangeFilter, first: Int, after: String): IssueMatchChangeConnection Services(filter: ServiceFilter, first: Int, after: String): ServiceConnection Components(filter: ComponentFilter, first: Int, after: String): ComponentConnection diff --git a/internal/app/common/pagination_helpers.go b/internal/app/common/pagination_helpers.go index 8e277c7f..debcd026 100644 --- a/internal/app/common/pagination_helpers.go +++ b/internal/app/common/pagination_helpers.go @@ -33,6 +33,17 @@ func EnsurePaginated(filter *entity.Paginated) { } } +func EnsurePaginatedX(filter *entity.PaginatedX) { + if filter.First == nil { + first := 10 + filter.First = &first + } + if filter.After == nil { + var after string = "" + filter.After = &after + } +} + func GetPages(firstCursor *string, ids []int64, pageSize int) ([]entity.Page, entity.Page) { var currentCursor = util.Ptr("0") var pages []entity.Page @@ -73,6 +84,46 @@ func GetPages(firstCursor *string, ids []int64, pageSize int) ([]entity.Page, en return pages, currentPage } +func GetCursorPages(firstCursor *string, cursors []string, pageSize int) ([]entity.Page, entity.Page) { + var currentCursor = "" + var pages []entity.Page + var currentPage entity.Page + var i = 0 + var pN = 0 + var page entity.Page + for _, c := range cursors { + i++ + if i == 1 { + pN++ + page = entity.Page{ + After: ¤tCursor, + IsCurrent: false, + } + } + if c == *firstCursor { + page.IsCurrent = true + } + page.PageCount = util.Ptr(i) + if i >= pageSize { + currentCursor = c + page.PageNumber = util.Ptr(pN) + pages = append(pages, page) + i = 0 + if page.IsCurrent { + currentPage = page + } + } + } + if len(cursors)%pageSize != 0 { + page.PageNumber = util.Ptr(pN) + pages = append(pages, page) + if page.IsCurrent { + currentPage = page + } + } + return pages, currentPage +} + func GetPageInfo[T entity.HasCursor](res []T, ids []int64, pageSize int, currentCursor int64) *entity.PageInfo { var nextPageAfter *string currentAfter := util.Ptr(fmt.Sprintf("%d", currentCursor)) @@ -95,6 +146,30 @@ func GetPageInfo[T entity.HasCursor](res []T, ids []int64, pageSize int, current } } +func GetPageInfoX[T entity.HasCursor](res []T, cursors []string, pageSize int, currentCursor *string) *entity.PageInfo { + + var nextPageAfter *string + currentAfter := currentCursor + firstCursor := res[0].Cursor() + + if len(res) > 1 { + nextPageAfter = res[len(res)-1].Cursor() + } else { + nextPageAfter = firstCursor + } + + pages, currentPage := GetCursorPages(firstCursor, cursors, pageSize) + + return &entity.PageInfo{ + HasNextPage: util.Ptr(currentPage.PageNumber != nil && *currentPage.PageNumber < len(pages)), + HasPreviousPage: util.Ptr(currentPage.PageNumber != nil && *currentPage.PageNumber > 1), + IsValidPage: util.Ptr(currentPage.After != nil && currentAfter != nil && *currentPage.After == *currentAfter), + PageNumber: currentPage.PageNumber, + NextPageAfter: nextPageAfter, + Pages: pages, + } +} + func FinalizePagination[T entity.HasCursor](results []T, filter *entity.Paginated, options *entity.ListOptions) (*entity.PageInfo, []T) { var pageInfo entity.PageInfo count := len(results) diff --git a/internal/app/issue/issue_handler_events.go b/internal/app/issue/issue_handler_events.go index 
f553601d..5b09deb8 100644 --- a/internal/app/issue/issue_handler_events.go +++ b/internal/app/issue/issue_handler_events.go @@ -158,7 +158,7 @@ func createIssueMatches( issue_matches, err := db.GetIssueMatches(&entity.IssueMatchFilter{ IssueId: []*int64{&issueId}, ComponentInstanceId: []*int64{&componentInstanceId}, - }) + }, []entity.Order{}) if err != nil { l.WithField("event-step", "FetchIssueMatches").WithError(err).Error("Error while fetching issue matches related to assigned Component Instance") diff --git a/internal/app/issue/issue_handler_events_test.go b/internal/app/issue/issue_handler_events_test.go index e6422882..4e7d6ad2 100644 --- a/internal/app/issue/issue_handler_events_test.go +++ b/internal/app/issue/issue_handler_events_test.go @@ -88,7 +88,7 @@ var _ = Describe("OnComponentVersionAttachmentToIssue", Label("app", "ComponentV db.On("GetIssueMatches", &entity.IssueMatchFilter{ ComponentInstanceId: []*int64{&componentInstance.Id}, IssueId: []*int64{&issueEntity.Id}, - }).Return([]entity.IssueMatch{}, nil) + }, []entity.Order{}).Return([]entity.IssueMatchResult{}, nil) db.On("GetServiceIssueVariants", &entity.ServiceIssueVariantFilter{ ComponentInstanceId: []*int64{&componentInstance.Id}, @@ -113,7 +113,7 @@ var _ = Describe("OnComponentVersionAttachmentToIssue", Label("app", "ComponentV }) It("skips creation if match already exists", func() { - existingMatch := test.NewFakeIssueMatch() + existingMatch := test.NewFakeIssueMatchResult() db.On("GetServiceIssueVariants", &entity.ServiceIssueVariantFilter{ ComponentInstanceId: []*int64{&componentInstance.Id}, IssueId: []*int64{&issueEntity.Id}, @@ -123,7 +123,7 @@ var _ = Describe("OnComponentVersionAttachmentToIssue", Label("app", "ComponentV db.On("GetIssueMatches", &entity.IssueMatchFilter{ ComponentInstanceId: []*int64{&componentInstance.Id}, IssueId: []*int64{&issueEntity.Id}, - }).Return([]entity.IssueMatch{existingMatch}, nil) + }, []entity.Order{}).Return([]entity.IssueMatchResult{existingMatch}, nil) issue.OnComponentVersionAttachmentToIssue(db, event) db.AssertNotCalled(GinkgoT(), "CreateIssueMatch", mock.Anything) diff --git a/internal/app/issue_match/issue_match_handler.go b/internal/app/issue_match/issue_match_handler.go index c17e81b5..69ea3b53 100644 --- a/internal/app/issue_match/issue_match_handler.go +++ b/internal/app/issue_match/issue_match_handler.go @@ -12,7 +12,6 @@ import ( "github.com/cloudoperators/heureka/internal/database" "github.com/cloudoperators/heureka/internal/entity" - "github.com/cloudoperators/heureka/pkg/util" "github.com/sirupsen/logrus" ) @@ -42,21 +41,13 @@ func (e *IssueMatchHandlerError) Error() string { return e.message } -func (h *issueMatchHandler) getIssueMatchResults(filter *entity.IssueMatchFilter) ([]entity.IssueMatchResult, error) { - var results []entity.IssueMatchResult - ims, err := h.database.GetIssueMatches(filter) +func (h *issueMatchHandler) getIssueMatchResults(filter *entity.IssueMatchFilter, order []entity.Order) ([]entity.IssueMatchResult, error) { + ims, err := h.database.GetIssueMatches(filter, order) if err != nil { return nil, err } - for _, im := range ims { - cursor := fmt.Sprintf("%d", im.Id) - results = append(results, entity.IssueMatchResult{ - WithCursor: entity.WithCursor{Value: cursor}, - IssueMatch: util.Ptr(im), - }) - } - return results, nil + return ims, nil } func (im *issueMatchHandler) GetIssueMatch(issueMatchId int64) (*entity.IssueMatch, error) { @@ -65,7 +56,8 @@ func (im *issueMatchHandler) GetIssueMatch(issueMatchId int64) (*entity.IssueMat 
"id": issueMatchId, }) issueMatchFilter := entity.IssueMatchFilter{Id: []*int64{&issueMatchId}} - issueMatches, err := im.ListIssueMatches(&issueMatchFilter, &entity.ListOptions{}) + options := entity.ListOptions{Order: []entity.Order{}} + issueMatches, err := im.ListIssueMatches(&issueMatchFilter, &options) if err != nil { l.Error(err) @@ -88,14 +80,14 @@ func (im *issueMatchHandler) ListIssueMatches(filter *entity.IssueMatchFilter, o var count int64 var pageInfo *entity.PageInfo - common.EnsurePaginated(&filter.Paginated) + common.EnsurePaginatedX(&filter.PaginatedX) l := logrus.WithFields(logrus.Fields{ "event": ListIssueMatchesEventName, "filter": filter, }) - res, err := im.getIssueMatchResults(filter) + res, err := im.database.GetIssueMatches(filter, options.Order) if err != nil { l.Error(err) @@ -104,13 +96,13 @@ func (im *issueMatchHandler) ListIssueMatches(filter *entity.IssueMatchFilter, o if options.ShowPageInfo { if len(res) > 0 { - ids, err := im.database.GetAllIssueMatchIds(filter) + cursors, err := im.database.GetAllIssueMatchCursors(filter, options.Order) if err != nil { l.Error(err) return nil, NewIssueMatchHandlerError("Error while getting all Ids") } - pageInfo = common.GetPageInfo(res, ids, *filter.First, *filter.After) - count = int64(len(ids)) + pageInfo = common.GetPageInfoX(res, cursors, *filter.First, filter.After) + count = int64(len(cursors)) } } else if options.ShowTotalCount { count, err = im.database.CountIssueMatches(filter) diff --git a/internal/app/issue_match/issue_match_handler_events.go b/internal/app/issue_match/issue_match_handler_events.go index 3003e2ce..1460b30d 100644 --- a/internal/app/issue_match/issue_match_handler_events.go +++ b/internal/app/issue_match/issue_match_handler_events.go @@ -171,7 +171,7 @@ func OnComponentVersionAssignmentToComponentInstance(db database.Database, compo issue_matches, err := db.GetIssueMatches(&entity.IssueMatchFilter{ IssueId: []*int64{&issueId}, ComponentInstanceId: []*int64{&componentInstanceID}, - }) + }, nil) if err != nil { l.WithField("event-step", "FetchIssueMatches").WithError(err).Error("Error while fetching issue matches related to assigned Component Instance") diff --git a/internal/app/issue_match/issue_match_handler_test.go b/internal/app/issue_match/issue_match_handler_test.go index 3e83f188..92ef4b81 100644 --- a/internal/app/issue_match/issue_match_handler_test.go +++ b/internal/app/issue_match/issue_match_handler_test.go @@ -14,6 +14,7 @@ import ( "github.com/cloudoperators/heureka/internal/app/issue_repository" "github.com/cloudoperators/heureka/internal/app/issue_variant" "github.com/cloudoperators/heureka/internal/app/severity" + "github.com/cloudoperators/heureka/internal/database/mariadb" "github.com/samber/lo" @@ -39,7 +40,7 @@ var _ = BeforeSuite(func() { func getIssueMatchFilter() *entity.IssueMatchFilter { return &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: nil, After: nil, }, @@ -71,7 +72,7 @@ var _ = Describe("When listing IssueMatches", Label("app", "ListIssueMatches"), BeforeEach(func() { options.ShowTotalCount = true - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{}, nil) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{}, nil) db.On("CountIssueMatches", filter).Return(int64(1337), nil) }) @@ -89,16 +90,26 @@ var _ = Describe("When listing IssueMatches", Label("app", "ListIssueMatches"), }) DescribeTable("pagination information is correct", func(pageSize int, dbElements int, 
resElements int, hasNextPage bool) { filter.First = &pageSize - matches := test.NNewFakeIssueMatches(resElements) + matches := []entity.IssueMatchResult{} + for _, im := range test.NNewFakeIssueMatches(resElements) { + cursor, _ := mariadb.EncodeCursor(mariadb.WithIssueMatch([]entity.Order{}, im)) + matches = append(matches, entity.IssueMatchResult{WithCursor: entity.WithCursor{Value: cursor}, IssueMatch: lo.ToPtr(im)}) + } + + var cursors = lo.Map(matches, func(m entity.IssueMatchResult, _ int) string { + cursor, _ := mariadb.EncodeCursor(mariadb.WithIssueMatch([]entity.Order{}, *m.IssueMatch)) + return cursor + }) - var ids = lo.Map(matches, func(m entity.IssueMatch, _ int) int64 { return m.Id }) var i int64 = 0 - for len(ids) < dbElements { + for len(cursors) < dbElements { i++ - ids = append(ids, i) + im := test.NewFakeIssueMatch() + c, _ := mariadb.EncodeCursor(mariadb.WithIssueMatch([]entity.Order{}, im)) + cursors = append(cursors, c) } - db.On("GetIssueMatches", filter).Return(matches, nil) - db.On("GetAllIssueMatchIds", filter).Return(ids, nil) + db.On("GetIssueMatches", filter, []entity.Order{}).Return(matches, nil) + db.On("GetAllIssueMatchCursors", filter, []entity.Order{}).Return(cursors, nil) issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) res, err := issueMatchHandler.ListIssueMatches(filter, options) Expect(err).To(BeNil(), "no error should be thrown") @@ -121,7 +132,7 @@ var _ = Describe("When listing IssueMatches", Label("app", "ListIssueMatches"), Context("and the given filter does not have any matches in the database", func() { BeforeEach(func() { - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{}, nil) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{}, nil) }) It("should return an empty result", func() { @@ -134,7 +145,11 @@ var _ = Describe("When listing IssueMatches", Label("app", "ListIssueMatches"), }) Context("and the filter does have results in the database", func() { BeforeEach(func() { - db.On("GetIssueMatches", filter).Return(test.NNewFakeIssueMatches(15), nil) + issueMatches := []entity.IssueMatchResult{} + for _, im := range test.NNewFakeIssueMatches(15) { + issueMatches = append(issueMatches, entity.IssueMatchResult{IssueMatch: lo.ToPtr(im)}) + } + db.On("GetIssueMatches", filter, []entity.Order{}).Return(issueMatches, nil) }) It("should return the expected matches in the result", func() { issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) @@ -146,7 +161,7 @@ var _ = Describe("When listing IssueMatches", Label("app", "ListIssueMatches"), Context("and the database operations throw an error", func() { BeforeEach(func() { - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{}, errors.New("some error")) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{}, errors.New("some error")) }) It("should return the expected matches in the result", func() { @@ -222,18 +237,17 @@ var _ = Describe("When updating IssueMatch", Label("app", "UpdateIssueMatch"), f var ( db *mocks.MockDatabase issueMatchHandler im.IssueMatchHandler - issueMatch entity.IssueMatch + issueMatch entity.IssueMatchResult filter *entity.IssueMatchFilter ) BeforeEach(func() { db = mocks.NewMockDatabase(GinkgoT()) - issueMatch = test.NewFakeIssueMatch() + issueMatch = test.NewFakeIssueMatchResult() first := 10 - var after int64 - after = 0 + after := "" filter = &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: &first, After: &after, }, @@ -242,7 
+256,7 @@ var _ = Describe("When updating IssueMatch", Label("app", "UpdateIssueMatch"), f It("updates issueMatch", func() { db.On("GetAllUserIds", mock.Anything).Return([]int64{}, nil) - db.On("UpdateIssueMatch", &issueMatch).Return(nil) + db.On("UpdateIssueMatch", issueMatch.IssueMatch).Return(nil) issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) if issueMatch.Status == entity.NewIssueMatchStatusValue("new") { issueMatch.Status = entity.NewIssueMatchStatusValue("risk_accepted") @@ -250,8 +264,8 @@ var _ = Describe("When updating IssueMatch", Label("app", "UpdateIssueMatch"), f issueMatch.Status = entity.NewIssueMatchStatusValue("new") } filter.Id = []*int64{&issueMatch.Id} - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{issueMatch}, nil) - updatedIssueMatch, err := issueMatchHandler.UpdateIssueMatch(&issueMatch) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{issueMatch}, nil) + updatedIssueMatch, err := issueMatchHandler.UpdateIssueMatch(issueMatch.IssueMatch) Expect(err).To(BeNil(), "no error should be thrown") By("setting fields", func() { Expect(updatedIssueMatch.TargetRemediationDate).To(BeEquivalentTo(issueMatch.TargetRemediationDate)) @@ -273,32 +287,33 @@ var _ = Describe("When deleting IssueMatch", Label("app", "DeleteIssueMatch"), f issueMatchHandler im.IssueMatchHandler id int64 filter *entity.IssueMatchFilter + options *entity.ListOptions ) BeforeEach(func() { db = mocks.NewMockDatabase(GinkgoT()) id = 1 first := 10 - var after int64 - after = 0 + after := "" filter = &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: &first, After: &after, }, } + options = entity.NewListOptions() }) It("deletes issueMatch", func() { db.On("GetAllUserIds", mock.Anything).Return([]int64{}, nil) db.On("DeleteIssueMatch", id, mock.Anything).Return(nil) issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{}, nil) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{}, nil) err := issueMatchHandler.DeleteIssueMatch(id) Expect(err).To(BeNil(), "no error should be thrown") filter.Id = []*int64{&id} - issueMatches, err := issueMatchHandler.ListIssueMatches(filter, &entity.ListOptions{}) + issueMatches, err := issueMatchHandler.ListIssueMatches(filter, options) Expect(err).To(BeNil(), "no error should be thrown") Expect(issueMatches.Elements).To(BeEmpty(), "no error should be thrown") }) @@ -309,19 +324,18 @@ var _ = Describe("When modifying relationship of evidence and issueMatch", Label db *mocks.MockDatabase issueMatchHandler im.IssueMatchHandler evidence entity.Evidence - issueMatch entity.IssueMatch + issueMatch entity.IssueMatchResult filter *entity.IssueMatchFilter ) BeforeEach(func() { db = mocks.NewMockDatabase(GinkgoT()) - issueMatch = test.NewFakeIssueMatch() + issueMatch = test.NewFakeIssueMatchResult() evidence = test.NewFakeEvidenceEntity() first := 10 - var after int64 - after = 0 + after := "" filter = &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: &first, After: &after, }, @@ -331,7 +345,7 @@ var _ = Describe("When modifying relationship of evidence and issueMatch", Label It("adds evidence to issueMatch", func() { db.On("AddEvidenceToIssueMatch", issueMatch.Id, evidence.Id).Return(nil) - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{issueMatch}, nil) + db.On("GetIssueMatches", filter, 
[]entity.Order{}).Return([]entity.IssueMatchResult{issueMatch}, nil) issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) issueMatch, err := issueMatchHandler.AddEvidenceToIssueMatch(issueMatch.Id, evidence.Id) Expect(err).To(BeNil(), "no error should be thrown") @@ -340,7 +354,7 @@ var _ = Describe("When modifying relationship of evidence and issueMatch", Label It("removes evidence from issueMatch", func() { db.On("RemoveEvidenceFromIssueMatch", issueMatch.Id, evidence.Id).Return(nil) - db.On("GetIssueMatches", filter).Return([]entity.IssueMatch{issueMatch}, nil) + db.On("GetIssueMatches", filter, []entity.Order{}).Return([]entity.IssueMatchResult{issueMatch}, nil) issueMatchHandler = im.NewIssueMatchHandler(db, er, nil) issueMatch, err := issueMatchHandler.RemoveEvidenceFromIssueMatch(issueMatch.Id, evidence.Id) Expect(err).To(BeNil(), "no error should be thrown") @@ -468,7 +482,7 @@ var _ = Describe("OnComponentInstanceCreate", Label("app", "OnComponentInstanceC }) It("should create issue matches for each issue", func() { - db.On("GetIssueMatches", mock.Anything).Return([]entity.IssueMatch{}, nil) + db.On("GetIssueMatches", mock.Anything, mock.Anything).Return([]entity.IssueMatchResult{}, nil) // Mock CreateIssueMatch db.On("CreateIssueMatch", mock.AnythingOfType("*entity.IssueMatch")).Return(&entity.IssueMatch{}, nil).Twice() im.OnComponentVersionAssignmentToComponentInstance(db, componentInstanceID, componentVersionID) @@ -480,10 +494,10 @@ var _ = Describe("OnComponentInstanceCreate", Label("app", "OnComponentInstanceC Context("when issue matches already exist", func() { BeforeEach(func() { // Fake issues - issueMatch := test.NewFakeIssueMatch() + issueMatch := test.NewFakeIssueMatchResult() issueMatch.IssueId = 2 // issue2.Id //when issueid is 2 return a fake issue match - db.On("GetIssueMatches", mock.Anything).Return([]entity.IssueMatch{issueMatch}, nil).Once() + db.On("GetIssueMatches", mock.Anything, mock.Anything).Return([]entity.IssueMatchResult{issueMatch}, nil).Once() }) It("should should not create new issues", func() { diff --git a/internal/database/interface.go b/internal/database/interface.go index c3fbbab7..2d36bfbf 100644 --- a/internal/database/interface.go +++ b/internal/database/interface.go @@ -35,8 +35,9 @@ type Database interface { GetDefaultIssuePriority() int64 GetDefaultRepositoryName() string - GetIssueMatches(*entity.IssueMatchFilter) ([]entity.IssueMatch, error) + GetIssueMatches(*entity.IssueMatchFilter, []entity.Order) ([]entity.IssueMatchResult, error) GetAllIssueMatchIds(*entity.IssueMatchFilter) ([]int64, error) + GetAllIssueMatchCursors(*entity.IssueMatchFilter, []entity.Order) ([]string, error) CountIssueMatches(filter *entity.IssueMatchFilter) (int64, error) CreateIssueMatch(*entity.IssueMatch) (*entity.IssueMatch, error) UpdateIssueMatch(*entity.IssueMatch) error diff --git a/internal/database/mariadb/cursor.go b/internal/database/mariadb/cursor.go new file mode 100644 index 00000000..d241e700 --- /dev/null +++ b/internal/database/mariadb/cursor.go @@ -0,0 +1,131 @@ +// SPDX-FileCopyrightText: 2024 SAP SE or an SAP affiliate company and Greenhouse contributors +// SPDX-License-Identifier: Apache-2.0 + +package mariadb + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + + "github.com/cloudoperators/heureka/internal/entity" +) + +type Field struct { + Name entity.OrderByField + Value any + Order entity.OrderDirection +} + +type cursors struct { + fields []Field +} + +type NewCursor func(cursors *cursors) error + +func 
EncodeCursor(opts ...NewCursor) (string, error) { + var cursors cursors + for _, opt := range opts { + err := opt(&cursors) + if err != nil { + return "", err + } + } + + var buf bytes.Buffer + encoder := base64.NewEncoder(base64.StdEncoding, &buf) + err := json.NewEncoder(encoder).Encode(cursors.fields) + if err != nil { + return "", err + } + encoder.Close() + return buf.String(), nil +} + +func DecodeCursor(cursor *string) ([]Field, error) { + var fields []Field + if cursor == nil || *cursor == "" { + return fields, nil + } + decoded, err := base64.StdEncoding.DecodeString(*cursor) + if err != nil { + return nil, fmt.Errorf("failed to decode base64 string: %w", err) + } + + if err := json.Unmarshal(decoded, &fields); err != nil { + return nil, fmt.Errorf("failed to unmarshal JSON: %w", err) + } + + return fields, nil +} + +func CreateCursorQuery(query string, fields []Field) string { + if len(fields) == 0 { + return query + } + + subQuery := "" + for i, f := range fields { + dir := ">" + switch f.Order { + case entity.OrderDirectionAsc: + dir = ">" + case entity.OrderDirectionDesc: + dir = "<" + } + if i >= len(fields)-1 { + subQuery = fmt.Sprintf("%s %s %s ? ", subQuery, ColumnName(f.Name), dir) + } else { + subQuery = fmt.Sprintf("%s %s = ? AND ", subQuery, ColumnName(f.Name)) + } + } + + subQuery = fmt.Sprintf("( %s )", subQuery) + if query != "" { + subQuery = fmt.Sprintf("%s OR %s", query, subQuery) + } + + return CreateCursorQuery(subQuery, fields[:len(fields)-1]) +} + +func CreateCursorParameters(params []any, fields []Field) []any { + if len(fields) == 0 { + return params + } + + for i := 0; i < len(fields); i++ { + params = append(params, fields[i].Value) + } + + return CreateCursorParameters(params, fields[:len(fields)-1]) +} + +func WithIssueMatch(order []entity.Order, im entity.IssueMatch) NewCursor { + + return func(cursors *cursors) error { + order = GetDefaultOrder(order, entity.IssueMatchId, entity.OrderDirectionAsc) + for _, o := range order { + switch o.By { + case entity.IssueMatchId: + cursors.fields = append(cursors.fields, Field{Name: entity.IssueMatchId, Value: im.Id, Order: o.Direction}) + case entity.IssueMatchTargetRemediationDate: + cursors.fields = append(cursors.fields, Field{Name: entity.IssueMatchTargetRemediationDate, Value: im.TargetRemediationDate, Order: o.Direction}) + case entity.IssueMatchRating: + cursors.fields = append(cursors.fields, Field{Name: entity.IssueMatchRating, Value: im.Severity.Value, Order: o.Direction}) + case entity.ComponentInstanceCcrn: + if im.ComponentInstance != nil { + cursors.fields = append(cursors.fields, Field{Name: entity.ComponentInstanceCcrn, Value: im.ComponentInstance.CCRN, Order: o.Direction}) + } + case entity.IssuePrimaryName: + if im.Issue != nil { + cursors.fields = append(cursors.fields, Field{Name: entity.IssuePrimaryName, Value: im.Issue.PrimaryName, Order: o.Direction}) + } + default: + continue + } + } + + return nil + } +} diff --git a/internal/database/mariadb/database.go b/internal/database/mariadb/database.go index b89b076f..09a3ba92 100644 --- a/internal/database/mariadb/database.go +++ b/internal/database/mariadb/database.go @@ -246,7 +246,7 @@ func performExec[T any](s *SqlDatabase, query string, item T, l *logrus.Entry) ( return res, nil } -func performListScan[T DatabaseRow, E entity.HeurekaEntity](stmt *sqlx.Stmt, filterParameters []interface{}, l *logrus.Entry, listBuilder func([]E, T) []E) ([]E, error) { +func performListScan[T DatabaseRow, E entity.HeurekaEntity | DatabaseRow](stmt *sqlx.Stmt, 
filterParameters []interface{}, l *logrus.Entry, listBuilder func([]E, T) []E) ([]E, error) { rows, err := stmt.Queryx(filterParameters...) if err != nil { msg := "Error while performing Query from prepared Statement" @@ -392,6 +392,14 @@ func getCursor(p entity.Paginated, filterStr string, stmt string) entity.Cursor } } +func GetDefaultOrder(order []entity.Order, by entity.OrderByField, direction entity.OrderDirection) []entity.Order { + if len(order) == 0 { + order = append([]entity.Order{{By: by, Direction: direction}}, order...) + } + + return order +} + func buildStateFilterQuery(state []entity.StateFilterType, prefix string) string { stateQueries := []string{} if len(state) < 1 { diff --git a/internal/database/mariadb/entity.go b/internal/database/mariadb/entity.go index d0111730..63c51765 100644 --- a/internal/database/mariadb/entity.go +++ b/internal/database/mariadb/entity.go @@ -59,6 +59,38 @@ func GetUserTypeValue(v sql.NullInt64) entity.UserType { } } +// RowComposite is a composite type that contains all the row types for the database +// This is used to unmarshal the database rows into the corresponding entity types in a dynamical manner +type RowComposite struct { + *IssueRow + *IssueCountRow + *GetIssuesByRow + *IssueMatchRow + *IssueAggregationsRow + *IssueVariantRow + *BaseIssueRepositoryRow + *IssueRepositoryRow + *IssueVariantWithRepository + *ComponentRow + *ComponentInstanceRow + *ComponentVersionRow + *BaseServiceRow + *ServiceRow + *GetServicesByRow + *ServiceAggregationsRow + *ActivityRow + *UserRow + *EvidenceRow + *OwnerRow + *SupportGroupRow + *SupportGroupServiceRow + *ActivityHasIssueRow + *ActivityHasServiceRow + *IssueRepositoryServiceRow + *IssueMatchChangeRow + *ServiceIssueVariantRow +} + type DatabaseRow interface { IssueRow | IssueCountRow | @@ -86,7 +118,8 @@ type DatabaseRow interface { ActivityHasServiceRow | IssueRepositoryServiceRow | IssueMatchChangeRow | - ServiceIssueVariantRow + ServiceIssueVariantRow | + RowComposite } type IssueRow struct { diff --git a/internal/database/mariadb/issue_match.go b/internal/database/mariadb/issue_match.go index 1416052b..de4c8bb8 100644 --- a/internal/database/mariadb/issue_match.go +++ b/internal/database/mariadb/issue_match.go @@ -9,6 +9,7 @@ import ( "github.com/cloudoperators/heureka/internal/entity" "github.com/jmoiron/sqlx" + "github.com/samber/lo" "github.com/sirupsen/logrus" ) @@ -18,9 +19,9 @@ func (s *SqlDatabase) ensureIssueMatchFilter(f *entity.IssueMatchFilter) *entity } var first = 1000 - var after int64 = 0 + var after string = "" return &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: &first, After: &after, }, @@ -47,10 +48,16 @@ func (s *SqlDatabase) getIssueMatchFilterString(filter *entity.IssueMatchFilter) return combineFilterQueries(fl, OP_AND) } -func (s *SqlDatabase) getIssueMatchJoins(filter *entity.IssueMatchFilter) string { +func (s *SqlDatabase) getIssueMatchJoins(filter *entity.IssueMatchFilter, order []entity.Order) string { joins := "" + orderByIssuePrimaryName := lo.ContainsBy(order, func(o entity.Order) bool { + return o.By == entity.IssuePrimaryName + }) + orderByCiCcrn := lo.ContainsBy(order, func(o entity.Order) bool { + return o.By == entity.ComponentInstanceCcrn + }) - if len(filter.Search) > 0 || len(filter.IssueType) > 0 || len(filter.PrimaryName) > 0 { + if len(filter.Search) > 0 || len(filter.IssueType) > 0 || len(filter.PrimaryName) > 0 || orderByIssuePrimaryName { joins = fmt.Sprintf("%s\n%s", joins, ` LEFT JOIN Issue I on 
I.issue_id = IM.issuematch_issue_id `) @@ -93,6 +100,13 @@ func (s *SqlDatabase) getIssueMatchJoins(filter *entity.IssueMatchFilter) string `) } } + + if orderByCiCcrn { + joins = fmt.Sprintf("%s\n%s", joins, ` + LEFT JOIN ComponentInstance CI on CI.componentinstance_id = IM.issuematch_component_instance_id + `) + } + return joins } @@ -128,30 +142,54 @@ func (s *SqlDatabase) getIssueMatchUpdateFields(issueMatch *entity.IssueMatch) s return strings.Join(fl, ", ") } -func (s *SqlDatabase) buildIssueMatchStatement(baseQuery string, filter *entity.IssueMatchFilter, withCursor bool, l *logrus.Entry) (*sqlx.Stmt, []interface{}, error) { +func (s *SqlDatabase) getIssueMatchColumns(order []entity.Order) string { + columns := "" + for _, o := range order { + switch o.By { + case entity.IssuePrimaryName: + columns = fmt.Sprintf("%s, I.issue_primary_name", columns) + case entity.ComponentInstanceCcrn: + columns = fmt.Sprintf("%s, CI.componentinstance_ccrn", columns) + } + } + return columns +} + +func (s *SqlDatabase) buildIssueMatchStatement(baseQuery string, filter *entity.IssueMatchFilter, withCursor bool, order []entity.Order, l *logrus.Entry) (*sqlx.Stmt, []interface{}, error) { var query string filter = s.ensureIssueMatchFilter(filter) l.WithFields(logrus.Fields{"filter": filter}) filterStr := s.getIssueMatchFilterString(filter) - joins := s.getIssueMatchJoins(filter) - cursor := getCursor(filter.Paginated, filterStr, "IM.issuematch_id > ?") + cursorFields, err := DecodeCursor(filter.PaginatedX.After) + if err != nil { + return nil, nil, err + } + cursorQuery := CreateCursorQuery("", cursorFields) + + order = GetDefaultOrder(order, entity.IssueMatchId, entity.OrderDirectionAsc) + orderStr := CreateOrderString(order) + columns := s.getIssueMatchColumns(order) + joins := s.getIssueMatchJoins(filter, order) whereClause := "" if filterStr != "" || withCursor { whereClause = fmt.Sprintf("WHERE %s", filterStr) } + if filterStr != "" && withCursor && cursorQuery != "" { + cursorQuery = fmt.Sprintf(" AND (%s)", cursorQuery) + } + // construct final query if withCursor { - query = fmt.Sprintf(baseQuery, joins, whereClause, cursor.Statement) + query = fmt.Sprintf(baseQuery, columns, joins, whereClause, cursorQuery, orderStr) } else { - query = fmt.Sprintf(baseQuery, joins, whereClause) + query = fmt.Sprintf(baseQuery, columns, joins, whereClause, orderStr) } //construct prepared statement and if where clause does exist add parameters var stmt *sqlx.Stmt - var err error stmt, err = s.db.Preparex(query) if err != nil { @@ -181,8 +219,13 @@ func (s *SqlDatabase) buildIssueMatchStatement(baseQuery string, filter *entity. filterParameters = buildQueryParametersCount(filterParameters, filter.Search, wildCardFilterParamCount) if withCursor { - filterParameters = append(filterParameters, cursor.Value) - filterParameters = append(filterParameters, cursor.Limit) + p := CreateCursorParameters([]any{}, cursorFields) + filterParameters = append(filterParameters, p...) 
+ if filter.PaginatedX.First == nil { + filterParameters = append(filterParameters, 1000) + } else { + filterParameters = append(filterParameters, filter.PaginatedX.First) + } } return stmt, filterParameters, nil @@ -195,12 +238,12 @@ func (s *SqlDatabase) GetAllIssueMatchIds(filter *entity.IssueMatchFilter) ([]in }) baseQuery := ` - SELECT IM.issuematch_id FROM IssueMatch IM + SELECT IM.issuematch_id %s FROM IssueMatch IM %s - %s GROUP BY IM.issuematch_id ORDER BY IM.issuematch_id + %s GROUP BY IM.issuematch_id ORDER BY %s ` - stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, false, l) + stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, false, []entity.Order{}, l) if err != nil { return nil, err @@ -209,19 +252,65 @@ func (s *SqlDatabase) GetAllIssueMatchIds(filter *entity.IssueMatchFilter) ([]in return performIdScan(stmt, filterParameters, l) } -func (s *SqlDatabase) GetIssueMatches(filter *entity.IssueMatchFilter) ([]entity.IssueMatch, error) { +func (s *SqlDatabase) GetAllIssueMatchCursors(filter *entity.IssueMatchFilter, order []entity.Order) ([]string, error) { + l := logrus.WithFields(logrus.Fields{ + "filter": filter, + "event": "database.GetIssueAllIssueMatchCursors", + }) + + baseQuery := ` + SELECT IM.* %s FROM IssueMatch IM + %s + %s GROUP BY IM.issuematch_id ORDER BY %s + ` + + stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, false, order, l) + + if err != nil { + return nil, err + } + + rows, err := performListScan( + stmt, + filterParameters, + l, + func(l []RowComposite, e RowComposite) []RowComposite { + return append(l, e) + }, + ) + + if err != nil { + return nil, err + } + + return lo.Map(rows, func(row RowComposite, _ int) string { + im := row.AsIssueMatch() + if row.IssueRow != nil { + im.Issue = lo.ToPtr(row.IssueRow.AsIssue()) + } + if row.ComponentInstanceRow != nil { + im.ComponentInstance = lo.ToPtr(row.ComponentInstanceRow.AsComponentInstance()) + } + + cursor, _ := EncodeCursor(WithIssueMatch(order, im)) + + return cursor + }), nil +} + +func (s *SqlDatabase) GetIssueMatches(filter *entity.IssueMatchFilter, order []entity.Order) ([]entity.IssueMatchResult, error) { l := logrus.WithFields(logrus.Fields{ "filter": filter, "event": "database.GetIssueMatches", }) baseQuery := ` - SELECT IM.* FROM IssueMatch IM + SELECT IM.* %s FROM IssueMatch IM %s - %s %s GROUP BY IM.issuematch_id ORDER BY IM.issuematch_id LIMIT ? + %s %s GROUP BY IM.issuematch_id ORDER BY %s LIMIT ? 
` - stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, true, l) + stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, true, order, l) if err != nil { return nil, err @@ -231,8 +320,24 @@ func (s *SqlDatabase) GetIssueMatches(filter *entity.IssueMatchFilter) ([]entity stmt, filterParameters, l, - func(l []entity.IssueMatch, e IssueMatchRow) []entity.IssueMatch { - return append(l, e.AsIssueMatch()) + func(l []entity.IssueMatchResult, e RowComposite) []entity.IssueMatchResult { + im := e.AsIssueMatch() + if e.IssueRow != nil { + im.Issue = lo.ToPtr(e.IssueRow.AsIssue()) + } + if e.ComponentInstanceRow != nil { + im.ComponentInstance = lo.ToPtr(e.ComponentInstanceRow.AsComponentInstance()) + } + + cursor, _ := EncodeCursor(WithIssueMatch(order, im)) + + imr := entity.IssueMatchResult{ + WithCursor: entity.WithCursor{ + Value: cursor, + }, + IssueMatch: &im, + } + return append(l, imr) }, ) } @@ -244,12 +349,13 @@ func (s *SqlDatabase) CountIssueMatches(filter *entity.IssueMatchFilter) (int64, }) baseQuery := ` - SELECT count(distinct IM.issuematch_id) FROM IssueMatch IM + SELECT count(distinct IM.issuematch_id) %s FROM IssueMatch IM %s %s + ORDER BY %s ` - stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, false, l) + stmt, filterParameters, err := s.buildIssueMatchStatement(baseQuery, filter, false, []entity.Order{}, l) if err != nil { return -1, err diff --git a/internal/database/mariadb/issue_match_test.go b/internal/database/mariadb/issue_match_test.go index dcbc0b40..3bbf7ea9 100644 --- a/internal/database/mariadb/issue_match_test.go +++ b/internal/database/mariadb/issue_match_test.go @@ -4,7 +4,14 @@ package mariadb_test import ( + "database/sql" + "encoding/json" "math/rand" + "os" + "path/filepath" + "runtime" + "sort" + "time" "github.com/samber/lo" @@ -101,8 +108,8 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { It("can filter by a single issue id that does exist", func() { issueMatch := seedCollection.IssueMatchRows[rand.Intn(len(seedCollection.IssueMatchRows))] filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, - IssueId: []*int64{&issueMatch.IssueId.Int64}, + PaginatedX: entity.PaginatedX{}, + IssueId: []*int64{&issueMatch.IssueId.Int64}, } var imIds []int64 @@ -135,7 +142,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { When("Getting IssueMatches", Label("GetIssueMatches"), func() { Context("and the database is empty", func() { It("can perform the query", func() { - res, err := db.GetIssueMatches(nil) + res, err := db.GetIssueMatches(nil, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -151,12 +158,13 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { BeforeEach(func() { seedCollection = seeder.SeedDbWithNFakeData(10) + seedCollection.GetValidIssueMatchRows() issueMatches = seedCollection.GetValidIssueMatchRows() }) Context("and using no filter", func() { It("can fetch the items correctly", func() { - res, err := db.GetIssueMatches(nil) + res, err := db.GetIssueMatches(nil, nil) By("throwing no error", func() { Expect(err).Should(BeNil()) @@ -196,7 +204,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { Id: []*int64{&im.Id.Int64}, } - entries, err := db.GetIssueMatches(filter) + entries, err := db.GetIssueMatches(filter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -213,8 +221,7 @@ var _ = Describe("IssueMatch", Label("database", 
"IssueMatch"), func() { It("can filter by a single issue id that does exist", func() { issueMatch := seedCollection.IssueMatchRows[rand.Intn(len(seedCollection.IssueMatchRows))] filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, - IssueId: []*int64{&issueMatch.IssueId.Int64}, + IssueId: []*int64{&issueMatch.IssueId.Int64}, } var imIds []int64 @@ -224,7 +231,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { } } - entries, err := db.GetIssueMatches(filter) + entries, err := db.GetIssueMatches(filter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -243,7 +250,6 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { It("can filter by a single component instance id that does exist", func() { issueMatch := seedCollection.IssueMatchRows[rand.Intn(len(seedCollection.IssueMatchRows))] filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, ComponentInstanceId: []*int64{&issueMatch.ComponentInstanceId.Int64}, } @@ -254,7 +260,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { } } - entries, err := db.GetIssueMatches(filter) + entries, err := db.GetIssueMatches(filter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -273,7 +279,6 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { It("can filter by a single evidence id that does exist", func() { issueMatch := seedCollection.IssueMatchEvidenceRows[rand.Intn(len(seedCollection.IssueMatchEvidenceRows))] filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, EvidenceId: []*int64{&issueMatch.EvidenceId.Int64}, } @@ -284,7 +289,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { } } - entries, err := db.GetIssueMatches(filter) + entries, err := db.GetIssueMatches(filter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -316,13 +321,12 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, SupportGroupCCRN: []*string{&supportGroup.CCRN.String}, } // fixture creation does not guarantee that a support group is always present if sgFound { - entries, err := db.GetIssueMatches(filter) + entries, err := db.GetIssueMatches(filter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) @@ -333,7 +337,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) By("entries contain vm", func() { - _, found := lo.Find(entries, func(e entity.IssueMatch) bool { + _, found := lo.Find(entries, func(e entity.IssueMatchResult) bool { return e.Id == issueMatch.Id.Int64 }) Expect(found).To(BeTrue()) @@ -342,17 +346,21 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) Context("and and we use Pagination", func() { DescribeTable("can correctly paginate ", func(pageSize int) { - test.TestPaginationOfList( + test.TestPaginationOfListWithOrder( db.GetIssueMatches, - func(first *int, after *int64) *entity.IssueMatchFilter { + func(first *int, after *int64, afterX *string) *entity.IssueMatchFilter { return &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: first, - After: after, + After: afterX, }, } }, - func(entries []entity.IssueMatch) *int64 { return &entries[len(entries)-1].Id }, + []entity.Order{}, + func(entries []entity.IssueMatchResult) string { + after, _ := mariadb.EncodeCursor(mariadb.WithIssueMatch([]entity.Order{}, *entries[len(entries)-1].IssueMatch)) + return 
after + }, len(issueMatches), pageSize, ) @@ -367,6 +375,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) }) }) + When("Counting Issue Matches", Label("CountIssueMatches"), func() { Context("and using no filter", func() { DescribeTable("it returns correct count", func(x int) { @@ -395,9 +404,9 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) It("does not influence the count when pagination is applied", func() { var first = 1 - var after int64 = 0 + var after string = "" filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{ + PaginatedX: entity.PaginatedX{ First: &first, After: &after, }, @@ -415,8 +424,8 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { It("does show the correct amount when filtering for an issue", func() { issueMatch := seedCollection.IssueMatchRows[rand.Intn(len(seedCollection.IssueMatchRows))] filter := &entity.IssueMatchFilter{ - Paginated: entity.Paginated{}, - IssueId: []*int64{&issueMatch.IssueId.Int64}, + PaginatedX: entity.PaginatedX{}, + IssueId: []*int64{&issueMatch.IssueId.Int64}, } var imIds []int64 @@ -472,7 +481,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { Id: []*int64{&issueMatch.Id}, } - im, err := db.GetIssueMatches(issueMatchFilter) + im, err := db.GetIssueMatches(issueMatchFilter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) }) @@ -517,7 +526,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { Id: []*int64{&issueMatch.Id}, } - im, err := db.GetIssueMatches(issueMatchFilter) + im, err := db.GetIssueMatches(issueMatchFilter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) }) @@ -556,7 +565,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { Id: []*int64{&issueMatch.Id}, } - im, err := db.GetIssueMatches(issueMatchFilter) + im, err := db.GetIssueMatches(issueMatchFilter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) }) @@ -597,7 +606,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { EvidenceId: []*int64{&evidence.Id}, } - im, err := db.GetIssueMatches(issueMatchFilter) + im, err := db.GetIssueMatches(issueMatchFilter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) }) @@ -626,7 +635,7 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { EvidenceId: []*int64{&issueMatchEvidenceRow.EvidenceId.Int64}, } - issueMatches, err := db.GetIssueMatches(issueMatchFilter) + issueMatches, err := db.GetIssueMatches(issueMatchFilter, nil) By("throwing no error", func() { Expect(err).To(BeNil()) }) @@ -638,3 +647,514 @@ var _ = Describe("IssueMatch", Label("database", "IssueMatch"), func() { }) }) }) + +var _ = Describe("Ordering IssueMatches", func() { + var db *mariadb.SqlDatabase + var seeder *test.DatabaseSeeder + var seedCollection *test.SeedCollection + + BeforeEach(func() { + var err error + db = dbm.NewTestSchema() + seeder, err = test.NewDatabaseSeeder(dbm.DbConfig()) + Expect(err).To(BeNil(), "Database Seeder Setup should work") + }) + + var testOrder = func( + order []entity.Order, + verifyFunc func(res []entity.IssueMatchResult), + ) { + res, err := db.GetIssueMatches(nil, order) + + By("throwing no error", func() { + Expect(err).Should(BeNil()) + }) + + By("returning the correct number of results", func() { + Expect(len(res)).Should(BeIdenticalTo(len(seedCollection.IssueMatchRows))) + }) + + By("returning the correct order", func() { + verifyFunc(res) + }) + } + + 
When("with ASC order", Label("IssueMatchASCOrder"), func() { + + BeforeEach(func() { + seedCollection = seeder.SeedDbWithNFakeData(10) + seedCollection.GetValidIssueMatchRows() + }) + + It("can order by id", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + return seedCollection.IssueMatchRows[i].Id.Int64 < seedCollection.IssueMatchRows[j].Id.Int64 + }) + + order := []entity.Order{ + {By: entity.IssueMatchId, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + for i, r := range res { + Expect(r.Id).Should(BeEquivalentTo(seedCollection.IssueMatchRows[i].Id.Int64)) + } + }) + }) + + It("can order by primaryName", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + issueI := seedCollection.GetIssueById(seedCollection.IssueMatchRows[i].IssueId.Int64) + issueJ := seedCollection.GetIssueById(seedCollection.IssueMatchRows[j].IssueId.Int64) + return issueI.PrimaryName.String < issueJ.PrimaryName.String + }) + + order := []entity.Order{ + {By: entity.IssuePrimaryName, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev string = "" + for _, r := range res { + issue := seedCollection.GetIssueById(r.IssueId) + Expect(issue).ShouldNot(BeNil()) + Expect(issue.PrimaryName.String >= prev).Should(BeTrue()) + prev = issue.PrimaryName.String + } + }) + }) + + It("can order by targetRemediationDate", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + return seedCollection.IssueMatchRows[i].TargetRemediationDate.Time.After(seedCollection.IssueMatchRows[j].TargetRemediationDate.Time) + }) + + order := []entity.Order{ + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev time.Time = time.Time{} + for _, r := range res { + Expect(r.TargetRemediationDate.After(prev)).Should(BeTrue()) + prev = r.TargetRemediationDate + + } + }) + }) + + It("can order by rating", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + r1 := test.SeverityToNumerical(seedCollection.IssueMatchRows[i].Rating.String) + r2 := test.SeverityToNumerical(seedCollection.IssueMatchRows[j].Rating.String) + return r1 < r2 + }) + + order := []entity.Order{ + {By: entity.IssueMatchRating, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + for i, r := range res { + Expect(r.Id).Should(BeEquivalentTo(seedCollection.IssueMatchRows[i].Id.Int64)) + } + }) + }) + + It("can order by component instance ccrn", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + ciI := seedCollection.GetComponentInstanceById(seedCollection.IssueMatchRows[i].ComponentInstanceId.Int64) + ciJ := seedCollection.GetComponentInstanceById(seedCollection.IssueMatchRows[j].ComponentInstanceId.Int64) + return ciI.CCRN.String < ciJ.CCRN.String + }) + + order := []entity.Order{ + {By: entity.ComponentInstanceCcrn, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev string = "" + for _, r := range res { + ci := seedCollection.GetComponentInstanceById(r.ComponentInstanceId) + Expect(ci).ShouldNot(BeNil()) + Expect(ci.CCRN.String >= prev).Should(BeTrue()) + prev = ci.CCRN.String + } + }) + }) + }) + + When("with DESC order", Label("IssueMatchDESCOrder"), func() { + + BeforeEach(func() { + seedCollection = seeder.SeedDbWithNFakeData(10) + }) + + 
It("can order by id", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + return seedCollection.IssueMatchRows[i].Id.Int64 > seedCollection.IssueMatchRows[j].Id.Int64 + }) + + order := []entity.Order{ + {By: entity.IssueMatchId, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + for i, r := range res { + Expect(r.Id).Should(BeEquivalentTo(seedCollection.IssueMatchRows[i].Id.Int64)) + } + }) + }) + + It("can order by primaryName", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + issueI := seedCollection.GetIssueById(seedCollection.IssueMatchRows[i].IssueId.Int64) + issueJ := seedCollection.GetIssueById(seedCollection.IssueMatchRows[j].IssueId.Int64) + return issueI.PrimaryName.String > issueJ.PrimaryName.String + }) + + order := []entity.Order{ + {By: entity.IssuePrimaryName, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev string = "\U0010FFFF" + for _, r := range res { + issue := seedCollection.GetIssueById(r.IssueId) + Expect(issue).ShouldNot(BeNil()) + Expect(issue.PrimaryName.String <= prev).Should(BeTrue()) + prev = issue.PrimaryName.String + } + }) + }) + + It("can order by targetRemediationDate", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + return seedCollection.IssueMatchRows[i].TargetRemediationDate.Time.Before(seedCollection.IssueMatchRows[j].TargetRemediationDate.Time) + }) + + order := []entity.Order{ + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev time.Time = time.Date(9999, 12, 31, 23, 59, 59, 999999999, time.UTC) + for _, r := range res { + Expect(r.TargetRemediationDate.Before(prev)).Should(BeTrue()) + prev = r.TargetRemediationDate + + } + }) + }) + + It("can order by rating", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + r1 := test.SeverityToNumerical(seedCollection.IssueMatchRows[i].Rating.String) + r2 := test.SeverityToNumerical(seedCollection.IssueMatchRows[j].Rating.String) + return r1 > r2 + }) + + order := []entity.Order{ + {By: entity.IssueMatchRating, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + for i, r := range res { + Expect(r.Id).Should(BeEquivalentTo(seedCollection.IssueMatchRows[i].Id.Int64)) + } + }) + }) + + It("can order by component instance ccrn", func() { + sort.Slice(seedCollection.IssueMatchRows, func(i, j int) bool { + ciI := seedCollection.GetComponentInstanceById(seedCollection.IssueMatchRows[i].ComponentInstanceId.Int64) + ciJ := seedCollection.GetComponentInstanceById(seedCollection.IssueMatchRows[j].ComponentInstanceId.Int64) + return ciI.CCRN.String > ciJ.CCRN.String + }) + + order := []entity.Order{ + {By: entity.ComponentInstanceCcrn, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prev string = "\U0010FFFF" + for _, r := range res { + ci := seedCollection.GetComponentInstanceById(r.ComponentInstanceId) + Expect(ci).ShouldNot(BeNil()) + Expect(ci.CCRN.String <= prev).Should(BeTrue()) + prev = ci.CCRN.String + } + }) + }) + }) + + When("multiple order by used", Label("IssueMatchMultipleOrderBy"), func() { + + BeforeEach(func() { + users := seeder.SeedUsers(10) + services := seeder.SeedServices(10) + components := seeder.SeedComponents(10) + componentVersions := 
seeder.SeedComponentVersions(10, components) + componentInstances := seeder.SeedComponentInstances(3, componentVersions, services) + issues := seeder.SeedIssues(3) + issueMatches := seeder.SeedIssueMatches(100, issues, componentInstances, users) + seedCollection = &test.SeedCollection{ + IssueRows: issues, + IssueMatchRows: issueMatches, + ComponentInstanceRows: componentInstances, + } + }) + + It("can order by asc issue primary name and asc targetRemediationDate", func() { + order := []entity.Order{ + {By: entity.IssuePrimaryName, Direction: entity.OrderDirectionAsc}, + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prevTrd time.Time = time.Date(9999, 12, 31, 23, 59, 59, 999999999, time.UTC) + var prevPn = "" + for _, r := range res { + issue := seedCollection.GetIssueById(r.IssueId) + if issue.PrimaryName.String == prevPn { + Expect(r.TargetRemediationDate.After(prevTrd)).Should(BeTrue()) + prevTrd = r.TargetRemediationDate + } else { + Expect(issue.PrimaryName.String > prevPn).To(BeTrue()) + prevTrd = time.Time{} + } + prevPn = issue.PrimaryName.String + } + }) + }) + + It("can order by asc issue primary name and desc targetRemediationDate", func() { + order := []entity.Order{ + {By: entity.IssuePrimaryName, Direction: entity.OrderDirectionAsc}, + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionDesc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prevTrd time.Time = time.Date(9999, 12, 31, 23, 59, 59, 999999999, time.UTC) + var prevPn = "" + for _, r := range res { + issue := seedCollection.GetIssueById(r.IssueId) + if issue.PrimaryName.String == prevPn { + Expect(r.TargetRemediationDate.Before(prevTrd)).Should(BeTrue()) + prevTrd = r.TargetRemediationDate + } else { + Expect(issue.PrimaryName.String > prevPn).To(BeTrue()) + prevTrd = time.Date(9999, 12, 31, 23, 59, 59, 999999999, time.UTC) + } + prevPn = issue.PrimaryName.String + } + }) + }) + + It("can order by asc rating and asc component instance ccrn and asc targetRemediationDate", func() { + order := []entity.Order{ + {By: entity.IssueMatchRating, Direction: entity.OrderDirectionAsc}, + {By: entity.ComponentInstanceCcrn, Direction: entity.OrderDirectionAsc}, + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionAsc}, + } + + testOrder(order, func(res []entity.IssueMatchResult) { + var prevSeverity = 0 + var prevCiCcrn = "" + var prevTrd time.Time = time.Time{} + for _, r := range res { + ci := seedCollection.GetComponentInstanceById(r.ComponentInstanceId) + if test.SeverityToNumerical(r.Severity.Value) == prevSeverity { + if ci.CCRN.String == prevCiCcrn { + Expect(r.TargetRemediationDate.After(prevTrd)).To(BeTrue()) + prevTrd = r.TargetRemediationDate + } else { + Expect(ci.CCRN.String > prevCiCcrn).To(BeTrue()) + prevCiCcrn = ci.CCRN.String + prevTrd = time.Time{} + } + } else { + Expect(test.SeverityToNumerical(r.Severity.Value) > prevSeverity).To(BeTrue()) + prevSeverity = test.SeverityToNumerical(r.Severity.Value) + prevCiCcrn = "" + prevTrd = time.Time{} + } + } + }) + }) + }) +}) + +// getTestDataPath returns the path to the test data directory relative to the calling file +func getTestDataPath(f string) string { + // Get the current file path + _, filename, _, _ := runtime.Caller(1) + // Get the directory containing the current file + dir := filepath.Dir(filename) + // Return path to test data directory (adjust the relative path as needed) + 
return filepath.Join(dir, "testdata", "issue_match_cursor", f) +} + +// LoadIssueMatches loads issue matches from JSON file +func LoadIssueMatches(filename string) ([]mariadb.IssueMatchRow, error) { + // Read JSON file + data, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + // Parse JSON into temporary struct that matches the JSON format + type tempIssueMatch struct { + Status string `json:"status"` + Rating string `json:"rating"` + Vector string `json:"vector"` + UserID int64 `json:"user_id"` + ComponentInstanceID int64 `json:"component_instance_id"` + IssueID int64 `json:"issue_id"` + TargetRemediationDate time.Time `json:"target_remediation_date"` + } + var tempMatches []tempIssueMatch + if err := json.Unmarshal(data, &tempMatches); err != nil { + return nil, err + } + // Convert to IssueMatchRow format + matches := make([]mariadb.IssueMatchRow, len(tempMatches)) + for i, tm := range tempMatches { + matches[i] = mariadb.IssueMatchRow{ + Status: sql.NullString{String: tm.Status, Valid: true}, + Rating: sql.NullString{String: tm.Rating, Valid: true}, + Vector: sql.NullString{String: tm.Vector, Valid: true}, + UserId: sql.NullInt64{Int64: tm.UserID, Valid: true}, + ComponentInstanceId: sql.NullInt64{Int64: tm.ComponentInstanceID, Valid: true}, + IssueId: sql.NullInt64{Int64: tm.IssueID, Valid: true}, + TargetRemediationDate: sql.NullTime{Time: tm.TargetRemediationDate, Valid: true}, + } + } + return matches, nil +} + +// LoadIssues loads issues from JSON file +func LoadIssues(filename string) ([]mariadb.IssueRow, error) { + data, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + type tempIssue struct { + Type string `json:"type"` + PrimaryName string `json:"primary_name"` + Description string `json:"description"` + } + var tempIssues []tempIssue + if err := json.Unmarshal(data, &tempIssues); err != nil { + return nil, err + } + issues := make([]mariadb.IssueRow, len(tempIssues)) + for i, ti := range tempIssues { + issues[i] = mariadb.IssueRow{ + Type: sql.NullString{String: ti.Type, Valid: true}, + PrimaryName: sql.NullString{String: ti.PrimaryName, Valid: true}, + Description: sql.NullString{String: ti.Description, Valid: true}, + } + } + return issues, nil +} + +// LoadComponentInstances loads component instances from JSON file +func LoadComponentInstances(filename string) ([]mariadb.ComponentInstanceRow, error) { + data, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + type tempComponentInstance struct { + CCRN string `json:"ccrn"` + Count int16 `json:"count"` + ComponentVersionID int64 `json:"component_version_id"` + ServiceID int64 `json:"service_id"` + } + var tempComponents []tempComponentInstance + if err := json.Unmarshal(data, &tempComponents); err != nil { + return nil, err + } + components := make([]mariadb.ComponentInstanceRow, len(tempComponents)) + for i, tc := range tempComponents { + components[i] = mariadb.ComponentInstanceRow{ + CCRN: sql.NullString{String: tc.CCRN, Valid: true}, + Count: sql.NullInt16{Int16: tc.Count, Valid: true}, + ComponentVersionId: sql.NullInt64{Int64: tc.ComponentVersionID, Valid: true}, + ServiceId: sql.NullInt64{Int64: tc.ServiceID, Valid: true}, + } + } + return components, nil +} + +var _ = Describe("Using the Cursor on IssueMatches", func() { + var db *mariadb.SqlDatabase + var seeder *test.DatabaseSeeder + BeforeEach(func() { + var err error + db = dbm.NewTestSchema() + seeder, err = test.NewDatabaseSeeder(dbm.DbConfig()) + Expect(err).To(BeNil(), "Database Seeder Setup should 
work") + }) + var loadTestData = func() ([]mariadb.IssueMatchRow, []mariadb.IssueRow, []mariadb.ComponentInstanceRow, error) { + matches, err := LoadIssueMatches(getTestDataPath("issue_match.json")) + if err != nil { + return nil, nil, nil, err + } + issues, err := LoadIssues(getTestDataPath("issue.json")) + if err != nil { + return nil, nil, nil, err + } + components, err := LoadComponentInstances(getTestDataPath("component_instance.json")) + if err != nil { + return nil, nil, nil, err + } + return matches, issues, components, nil + } + When("multiple orders used", func() { + BeforeEach(func() { + seeder.SeedUsers(10) + seeder.SeedServices(10) + components := seeder.SeedComponents(10) + seeder.SeedComponentVersions(10, components) + matches, issues, cis, err := loadTestData() + Expect(err).To(BeNil()) + // Important: the order need to be preserved + for _, ci := range cis { + _, err := seeder.InsertFakeComponentInstance(ci) + Expect(err).To(BeNil()) + } + for _, issue := range issues { + _, err := seeder.InsertFakeIssue(issue) + Expect(err).To(BeNil()) + } + for _, match := range matches { + _, err := seeder.InsertFakeIssueMatch(match) + Expect(err).To(BeNil()) + } + }) + It("can order by primary name and target remediation date", func() { + filter := entity.IssueMatchFilter{ + Id: []*int64{lo.ToPtr(int64(10))}, + } + order := []entity.Order{ + {By: entity.IssuePrimaryName, Direction: entity.OrderDirectionAsc}, + {By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionAsc}, + } + im, err := db.GetIssueMatches(&filter, order) + Expect(err).To(BeNil()) + Expect(im).To(HaveLen(1)) + filterWithCursor := entity.IssueMatchFilter{ + PaginatedX: entity.PaginatedX{ + After: im[0].Cursor(), + }, + } + res, err := db.GetIssueMatches(&filterWithCursor, order) + Expect(err).To(BeNil()) + Expect(res[0].Id).To(BeEquivalentTo(13)) + Expect(res[1].Id).To(BeEquivalentTo(20)) + Expect(res[2].Id).To(BeEquivalentTo(24)) + Expect(res[3].Id).To(BeEquivalentTo(30)) + Expect(res[4].Id).To(BeEquivalentTo(5)) + }) + }) +}) diff --git a/internal/database/mariadb/order.go b/internal/database/mariadb/order.go new file mode 100644 index 00000000..973eb8e3 --- /dev/null +++ b/internal/database/mariadb/order.go @@ -0,0 +1,52 @@ +// SPDX-FileCopyrightText: 2024 SAP SE or an SAP affiliate company and Greenhouse contributors +// SPDX-License-Identifier: Apache-2.0 + +package mariadb + +import ( + "fmt" + + "github.com/cloudoperators/heureka/internal/entity" +) + +func ColumnName(f entity.OrderByField) string { + switch f { + case entity.ComponentInstanceCcrn: + return "componentinstance_ccrn" + case entity.IssuePrimaryName: + return "issue_primary_name" + case entity.IssueMatchId: + return "issuematch_id" + case entity.IssueMatchRating: + return "issuematch_rating" + case entity.IssueMatchTargetRemediationDate: + return "issuematch_target_remediation_date" + case entity.SupportGroupName: + return "supportgroup_name" + default: + return "" + } +} + +func OrderDirectionStr(dir entity.OrderDirection) string { + switch dir { + case entity.OrderDirectionAsc: + return "ASC" + case entity.OrderDirectionDesc: + return "DESC" + default: + return "" + } +} + +func CreateOrderString(order []entity.Order) string { + orderStr := "" + for i, o := range order { + if i > 0 { + orderStr = fmt.Sprintf("%s, %s %s", orderStr, ColumnName(o.By), OrderDirectionStr(o.Direction)) + } else { + orderStr = fmt.Sprintf("%s %s %s", orderStr, ColumnName(o.By), OrderDirectionStr(o.Direction)) + } + } + return orderStr +} diff --git 
a/internal/database/mariadb/test/common.go b/internal/database/mariadb/test/common.go index c050e0e9..1062c83c 100644 --- a/internal/database/mariadb/test/common.go +++ b/internal/database/mariadb/test/common.go @@ -8,6 +8,42 @@ import ( . "github.com/onsi/gomega" ) +// Temporary used until order is used in all entities +func TestPaginationOfListWithOrder[F entity.HeurekaFilter, E entity.HeurekaEntity]( + listFunction func(*F, []entity.Order) ([]E, error), + filterFunction func(*int, *int64, *string) *F, + order []entity.Order, + getAfterFunction func([]E) string, + elementCount int, + pageSize int, +) { + quotient, remainder := elementCount/pageSize, elementCount%pageSize + expectedPages := quotient + if remainder > 0 { + expectedPages = expectedPages + 1 + } + + var after *int64 + var afterS string + for i := expectedPages; i > 0; i-- { + entries, err := listFunction(filterFunction(&pageSize, after, &afterS), order) + + Expect(err).To(BeNil()) + + if i == 1 && remainder > 0 { + Expect(len(entries)).To(BeEquivalentTo(remainder), "on the last page we expect") + } else { + if pageSize > elementCount { + Expect(len(entries)).To(BeEquivalentTo(elementCount), "on a page with a higher pageSize then element count we expect") + } else { + Expect(len(entries)).To(BeEquivalentTo(pageSize), "on a normal page we expect the element count to be equal to the page size") + + } + } + afterS = getAfterFunction(entries) + } +} + func TestPaginationOfList[F entity.HeurekaFilter, E entity.HeurekaEntity]( listFunction func(*F) ([]E, error), filterFunction func(*int, *int64) *F, @@ -41,3 +77,23 @@ func TestPaginationOfList[F entity.HeurekaFilter, E entity.HeurekaEntity]( } } + +// DB stores rating as enum +// entity.Severity.Score is based on CVSS vector and has a range between x and y +// This means a rating "Low" can have a Score 3.1, 3.3, ... 
+// Ordering is done based on enum on DB layer, so Score can't be used for checking order +// and needs a numerical translation +func SeverityToNumerical(s string) int { + rating := map[string]int{ + "None": 0, + "Low": 1, + "Medium": 2, + "High": 3, + "Critical": 4, + } + if val, ok := rating[s]; ok { + return val + } else { + return -1 + } +} diff --git a/internal/database/mariadb/test/fixture.go b/internal/database/mariadb/test/fixture.go index 3801516b..71fe8e32 100644 --- a/internal/database/mariadb/test/fixture.go +++ b/internal/database/mariadb/test/fixture.go @@ -9,7 +9,8 @@ import ( "math/rand" "strings" - "github.com/cloudoperators/heureka/internal/e2e/common" + e2e_common "github.com/cloudoperators/heureka/internal/e2e/common" + "github.com/cloudoperators/heureka/internal/entity" "github.com/goark/go-cvss/v3/metric" "github.com/onsi/ginkgo/v2/dsl/core" @@ -46,6 +47,24 @@ type SeedCollection struct { IssueRepositoryServiceRows []mariadb.IssueRepositoryServiceRow } +func (s *SeedCollection) GetComponentInstanceById(id int64) *mariadb.ComponentInstanceRow { + for _, ci := range s.ComponentInstanceRows { + if ci.Id.Int64 == id { + return &ci + } + } + return nil +} + +func (s *SeedCollection) GetIssueById(id int64) *mariadb.IssueRow { + for _, issue := range s.IssueRows { + if issue.Id.Int64 == id { + return &issue + } + } + return nil +} + func (s *SeedCollection) GetIssueVariantsByIssueId(id int64) []mariadb.IssueVariantRow { var r []mariadb.IssueVariantRow for _, iv := range s.IssueVariantRows { diff --git a/internal/database/mariadb/testdata/issue_match_cursor/component_instance.json b/internal/database/mariadb/testdata/issue_match_cursor/component_instance.json new file mode 100644 index 00000000..7fdd279d --- /dev/null +++ b/internal/database/mariadb/testdata/issue_match_cursor/component_instance.json @@ -0,0 +1,122 @@ +[ + { + "ccrn": "ccrn: spec=test, kind=pod, id=1", + "count": 2, + "component_version_id": 1, + "service_id": 1 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=2", + "count": 1, + "component_version_id": 1, + "service_id": 1 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=3", + "count": 3, + "component_version_id": 2, + "service_id": 2 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=4", + "count": 1, + "component_version_id": 2, + "service_id": 2 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=5", + "count": 2, + "component_version_id": 3, + "service_id": 3 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=6", + "count": 1, + "component_version_id": 3, + "service_id": 3 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=7", + "count": 4, + "component_version_id": 4, + "service_id": 4 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=8", + "count": 2, + "component_version_id": 4, + "service_id": 4 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=9", + "count": 6, + "component_version_id": 5, + "service_id": 5 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=10", + "count": 2, + "component_version_id": 5, + "service_id": 5 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=11", + "count": 3, + "component_version_id": 6, + "service_id": 6 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=12", + "count": 1, + "component_version_id": 6, + "service_id": 6 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=13", + "count": 4, + "component_version_id": 7, + "service_id": 7 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=14", + "count": 2, + "component_version_id": 7, + "service_id": 7 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=15", + "count": 3, + 
"component_version_id": 8, + "service_id": 8 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=16", + "count": 1, + "component_version_id": 8, + "service_id": 8 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=17", + "count": 5, + "component_version_id": 9, + "service_id": 9 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=18", + "count": 2, + "component_version_id": 9, + "service_id": 9 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=19", + "count": 4, + "component_version_id": 10, + "service_id": 10 + }, + { + "ccrn": "ccrn: spec=test, kind=pod, id=20", + "count": 2, + "component_version_id": 10, + "service_id": 10 + } +] diff --git a/internal/database/mariadb/testdata/issue_match_cursor/issue.json b/internal/database/mariadb/testdata/issue_match_cursor/issue.json new file mode 100644 index 00000000..63820de9 --- /dev/null +++ b/internal/database/mariadb/testdata/issue_match_cursor/issue.json @@ -0,0 +1,102 @@ +[ + { + "type": "vulnerability", + "primary_name": "CVE-2024-0001", + "description": "Authentication bypass vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0002", + "description": "Buffer overflow vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0003", + "description": "Command injection vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0004", + "description": "Cross-site request forgery" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0005", + "description": "Cross-site scripting vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0006", + "description": "Denial of service vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0007", + "description": "Directory traversal vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0008", + "description": "File inclusion vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0009", + "description": "Information disclosure vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0010", + "description": "Insecure direct object reference" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0011", + "description": "Memory leak vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0012", + "description": "Open redirect vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0013", + "description": "Path traversal vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0014", + "description": "Race condition vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0015", + "description": "Remote code execution vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0016", + "description": "Server-side request forgery" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0017", + "description": "SQL injection vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0018", + "description": "Timing attack vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0019", + "description": "XML external entity vulnerability" + }, + { + "type": "vulnerability", + "primary_name": "CVE-2024-0020", + "description": "Zero-day vulnerability" + } +] diff --git a/internal/database/mariadb/testdata/issue_match_cursor/issue_match.json b/internal/database/mariadb/testdata/issue_match_cursor/issue_match.json new file mode 100644 index 00000000..5512ec2e 
--- /dev/null +++ b/internal/database/mariadb/testdata/issue_match_cursor/issue_match.json @@ -0,0 +1,272 @@ +[ + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 1, + "component_instance_id": 1, + "issue_id": 1, + "target_remediation_date": "2024-02-01T00:00:00Z" + }, + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 2, + "component_instance_id": 2, + "issue_id": 2, + "target_remediation_date": "2024-02-05T00:00:00Z" + }, + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 3, + "component_instance_id": 3, + "issue_id": 3, + "target_remediation_date": "2024-02-10T00:00:00Z" + }, + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 4, + "component_instance_id": 4, + "issue_id": 4, + "target_remediation_date": "2024-02-15T00:00:00Z" + }, + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 5, + "component_instance_id": 5, + "issue_id": 5, + "target_remediation_date": "2024-02-20T00:00:00Z" + }, + { + "status": "Open", + "rating": "Critical", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", + "user_id": 6, + "component_instance_id": 6, + "issue_id": 6, + "target_remediation_date": "2024-02-25T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 7, + "component_instance_id": 7, + "issue_id": 7, + "target_remediation_date": "2024-03-01T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 8, + "component_instance_id": 8, + "issue_id": 8, + "target_remediation_date": "2024-03-05T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 9, + "component_instance_id": 9, + "issue_id": 9, + "target_remediation_date": "2024-03-10T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 10, + "component_instance_id": 10, + "issue_id": 4, + "target_remediation_date": "2024-03-15T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 1, + "component_instance_id": 11, + "issue_id": 11, + "target_remediation_date": "2024-03-20T00:00:00Z" + }, + { + "status": "Open", + "rating": "High", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L", + "user_id": 2, + "component_instance_id": 12, + "issue_id": 12, + "target_remediation_date": "2024-03-25T00:00:00Z" + }, + { + "status": "Open", + "rating": "Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 3, + "component_instance_id": 13, + "issue_id": 4, + "target_remediation_date": "2024-03-26T00:00:00Z" + }, + { + "status": "Open", + "rating": "Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 4, + "component_instance_id": 14, + "issue_id": 14, + "target_remediation_date": "2024-03-27T00:00:00Z" + }, + { + "status": "Open", + "rating": "Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 5, + "component_instance_id": 15, + "issue_id": 15, + "target_remediation_date": "2024-03-28T00:00:00Z" + }, + { + "status": "Open", + "rating": 
"Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 6, + "component_instance_id": 16, + "issue_id": 16, + "target_remediation_date": "2024-03-29T00:00:00Z" + }, + { + "status": "Open", + "rating": "Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 7, + "component_instance_id": 17, + "issue_id": 17, + "target_remediation_date": "2024-03-30T00:00:00Z" + }, + { + "status": "Open", + "rating": "Medium", + "vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:L", + "user_id": 8, + "component_instance_id": 18, + "issue_id": 18, + "target_remediation_date": "2024-03-31T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 9, + "component_instance_id": 19, + "issue_id": 19, + "target_remediation_date": "2024-04-01T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 10, + "component_instance_id": 20, + "issue_id": 4, + "target_remediation_date": "2024-04-02T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 1, + "component_instance_id": 1, + "issue_id": 1, + "target_remediation_date": "2024-04-03T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 2, + "component_instance_id": 2, + "issue_id": 2, + "target_remediation_date": "2024-04-04T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 3, + "component_instance_id": 3, + "issue_id": 3, + "target_remediation_date": "2024-04-05T00:00:00Z" + }, + { + "status": "Open", + "rating": "Low", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:L/A:N", + "user_id": 4, + "component_instance_id": 4, + "issue_id": 4, + "target_remediation_date": "2024-04-06T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 5, + "component_instance_id": 5, + "issue_id": 5, + "target_remediation_date": "2024-04-07T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 6, + "component_instance_id": 6, + "issue_id": 6, + "target_remediation_date": "2024-04-08T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 7, + "component_instance_id": 7, + "issue_id": 7, + "target_remediation_date": "2024-04-09T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 8, + "component_instance_id": 8, + "issue_id": 8, + "target_remediation_date": "2024-04-10T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 9, + "component_instance_id": 9, + "issue_id": 9, + "target_remediation_date": "2024-04-11T00:00:00Z" + }, + { + "status": "Open", + "rating": "None", + "vector": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:N/I:N/A:N", + "user_id": 10, + "component_instance_id": 10, + "issue_id": 4, + "target_remediation_date": "2024-04-12T00:00:00Z" + } +] diff --git a/internal/database/mariadb/user_test.go b/internal/database/mariadb/user_test.go index edd04558..3e0453c6 100644 --- a/internal/database/mariadb/user_test.go +++ b/internal/database/mariadb/user_test.go 
@@ -8,7 +8,7 @@ import ( "github.com/cloudoperators/heureka/internal/database/mariadb" "github.com/cloudoperators/heureka/internal/database/mariadb/test" - "github.com/cloudoperators/heureka/internal/e2e/common" + e2e_common "github.com/cloudoperators/heureka/internal/e2e/common" "github.com/cloudoperators/heureka/internal/entity" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" diff --git a/internal/e2e/issue_match_query_test.go b/internal/e2e/issue_match_query_test.go index a83c8e86..f7de97fe 100644 --- a/internal/e2e/issue_match_query_test.go +++ b/internal/e2e/issue_match_query_test.go @@ -60,7 +60,7 @@ var _ = Describe("Getting IssueMatches via API", Label("e2e", "IssueMatches"), f req.Var("filter", map[string]string{}) req.Var("first", 10) - req.Var("after", "0") + req.Var("after", "") req.Header.Set("Cache-Control", "no-cache") ctx := context.Background() @@ -98,7 +98,7 @@ var _ = Describe("Getting IssueMatches via API", Label("e2e", "IssueMatches"), f req.Var("filter", map[string]string{}) req.Var("first", 5) - req.Var("after", "0") + req.Var("after", "") req.Header.Set("Cache-Control", "no-cache") ctx := context.Background() @@ -132,7 +132,7 @@ var _ = Describe("Getting IssueMatches via API", Label("e2e", "IssueMatches"), f req.Var("filter", map[string]string{}) req.Var("first", 5) - req.Var("after", "0") + req.Var("after", "") req.Header.Set("Cache-Control", "no-cache") @@ -201,6 +201,102 @@ var _ = Describe("Getting IssueMatches via API", Label("e2e", "IssueMatches"), f Expect(*respData.IssueMatches.PageInfo.PageNumber).To(Equal(1), "Correct page number") }) }) + Context("we use ordering", Label("withOrder.graphql"), func() { + var respData struct { + IssueMatches model.IssueMatchConnection `json:"IssueMatches"` + } + + It("can order by primaryName", Label("withOrder.graphql"), func() { + // create a queryCollection (safe to share across requests) + client := graphql.NewClient(fmt.Sprintf("http://localhost:%s/query", cfg.Port)) + + //@todo may need to make this more fault proof?! What if the test is executed from the root dir? does it still work? + b, err := os.ReadFile("../api/graphql/graph/queryCollection/issueMatch/withOrder.graphql") + + Expect(err).To(BeNil()) + str := string(b) + req := graphql.NewRequest(str) + + req.Var("filter", map[string]string{}) + req.Var("first", 10) + req.Var("after", "") + req.Var("orderBy", []map[string]string{ + {"by": "primaryName", "direction": "asc"}, + }) + + req.Header.Set("Cache-Control", "no-cache") + + ctx := context.Background() + + err = client.Run(ctx, req, &respData) + + Expect(err).To(BeNil(), "Error while unmarshaling") + + By("- returns the correct result count", func() { + Expect(respData.IssueMatches.TotalCount).To(Equal(len(seedCollection.IssueMatchRows))) + Expect(len(respData.IssueMatches.Edges)).To(Equal(10)) + }) + + By("- returns the expected content in order", func() { + var prev string = "" + for _, im := range respData.IssueMatches.Edges { + Expect(*im.Node.Issue.PrimaryName >= prev).Should(BeTrue()) + prev = *im.Node.Issue.PrimaryName + } + }) + }) + + It("can order by primaryName and targetRemediationDate", Label("withOrder.graphql"), func() { + // create a queryCollection (safe to share across requests) + client := graphql.NewClient(fmt.Sprintf("http://localhost:%s/query", cfg.Port)) + + //@todo may need to make this more fault proof?! What if the test is executed from the root dir? does it still work? 
+ b, err := os.ReadFile("../api/graphql/graph/queryCollection/issueMatch/withOrder.graphql") + + Expect(err).To(BeNil()) + str := string(b) + req := graphql.NewRequest(str) + + req.Var("filter", map[string]string{}) + req.Var("first", 10) + req.Var("after", "") + req.Var("orderBy", []map[string]string{ + {"by": "primaryName", "direction": "asc"}, + {"by": "targetRemediationDate", "direction": "desc"}, + }) + + req.Header.Set("Cache-Control", "no-cache") + + ctx := context.Background() + + err = client.Run(ctx, req, &respData) + + Expect(err).To(BeNil(), "Error while unmarshaling") + + By("- returns the correct result count", func() { + Expect(respData.IssueMatches.TotalCount).To(Equal(len(seedCollection.IssueMatchRows))) + Expect(len(respData.IssueMatches.Edges)).To(Equal(10)) + }) + + By("- returns the expected content in order", func() { + var prevPn string = "" + var prevTrd time.Time = time.Now() + for _, im := range respData.IssueMatches.Edges { + if *im.Node.Issue.PrimaryName == prevPn { + trd, err := time.Parse("2006-01-02T15:04:05Z", *im.Node.TargetRemediationDate) + Expect(err).To(BeNil()) + Expect(trd.Before(prevTrd)).Should(BeTrue()) + prevTrd = trd + } else { + Expect(*im.Node.Issue.PrimaryName > prevPn).To(BeTrue()) + prevTrd = time.Now() + } + prevPn = *im.Node.Issue.PrimaryName + } + }) + }) + + }) }) }) }) diff --git a/internal/entity/common.go b/internal/entity/common.go index 472d8b7c..d08777df 100644 --- a/internal/entity/common.go +++ b/internal/entity/common.go @@ -52,6 +52,7 @@ type HeurekaEntity interface { IssueAggregations | Issue | IssueMatch | + IssueMatchResult | IssueMatchChange | HeurekaFilter | IssueCount | @@ -98,6 +99,7 @@ type ListOptions struct { ShowTotalCount bool `json:"show_total_count"` ShowPageInfo bool `json:"show_page_info"` IncludeAggregations bool `json:"include_aggregations"` + Order []Order } func NewListOptions() *ListOptions { @@ -105,6 +107,7 @@ func NewListOptions() *ListOptions { ShowTotalCount: false, ShowPageInfo: false, IncludeAggregations: false, + Order: []Order{}, } } @@ -142,6 +145,11 @@ type Paginated struct { After *int64 `json:"from"` } +type PaginatedX struct { + First *int `json:"first"` + After *string `json:"from"` +} + func MaxPaginated() Paginated { return Paginated{ First: util.Ptr(math.MaxInt), diff --git a/internal/entity/issue_match.go b/internal/entity/issue_match.go index 753d8d03..95e3b85c 100644 --- a/internal/entity/issue_match.go +++ b/internal/entity/issue_match.go @@ -55,7 +55,7 @@ type IssueMatch struct { } type IssueMatchFilter struct { - Paginated + PaginatedX Id []*int64 `json:"id"` AffectedServiceCCRN []*string `json:"affected_service_ccrn"` SeverityValue []*string `json:"severity_value"` diff --git a/internal/entity/order.go b/internal/entity/order.go new file mode 100644 index 00000000..ad70b5f0 --- /dev/null +++ b/internal/entity/order.go @@ -0,0 +1,30 @@ +// SPDX-FileCopyrightText: 2024 SAP SE or an SAP affiliate company and Greenhouse contributors +// SPDX-License-Identifier: Apache-2.0 + +package entity + +type OrderByField int + +const ( + ComponentInstanceCcrn OrderByField = iota + + IssuePrimaryName + + IssueMatchId + IssueMatchRating + IssueMatchTargetRemediationDate + + SupportGroupName +) + +type OrderDirection int + +const ( + OrderDirectionAsc OrderDirection = iota + OrderDirectionDesc +) + +type Order struct { + By OrderByField + Direction OrderDirection +} diff --git a/internal/entity/test/issue_match.go b/internal/entity/test/issue_match.go index 45ecfc62..6852463f 100644 --- 
a/internal/entity/test/issue_match.go +++ b/internal/entity/test/issue_match.go @@ -45,3 +45,10 @@ func NewRandomIssueStatus() entity.IssueMatchStatusValue { value := gofakeit.RandomString(entity.AllIssueMatchStatusValues) return entity.NewIssueMatchStatusValue(value) } + +func NewFakeIssueMatchResult() entity.IssueMatchResult { + im := NewFakeIssueMatch() + return entity.IssueMatchResult{ + IssueMatch: &im, + } +}
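
For reviewers who want to see how the pieces added in this patch fit together, the following is a minimal sketch, not part of the change itself: it combines the `[]entity.Order` argument of `GetIssueMatches`, the string-cursor `PaginatedX` filter, and the per-row `Cursor()` accessor on `IssueMatchResult` the way the DB-layer and cursor tests above do. The `pageIssueMatches` helper name is illustrative only, and the `*mariadb.SqlDatabase` handle is assumed to be constructed elsewhere (the tests obtain one via `dbm.NewTestSchema()`).

```go
package example

import (
	"github.com/cloudoperators/heureka/internal/database/mariadb"
	"github.com/cloudoperators/heureka/internal/entity"
)

// pageIssueMatches (hypothetical helper) walks all IssueMatches page by page,
// ordered by issue primary name and, within equal names, by descending
// target remediation date. The opaque cursor of the last row of each page is
// passed as the `After` value for the next request.
func pageIssueMatches(db *mariadb.SqlDatabase, pageSize int) ([]entity.IssueMatchResult, error) {
	order := []entity.Order{
		{By: entity.IssuePrimaryName, Direction: entity.OrderDirectionAsc},
		{By: entity.IssueMatchTargetRemediationDate, Direction: entity.OrderDirectionDesc},
	}

	var all []entity.IssueMatchResult
	after := "" // an empty cursor requests the first page, as in the e2e queries above
	for {
		filter := &entity.IssueMatchFilter{
			PaginatedX: entity.PaginatedX{First: &pageSize, After: &after},
		}
		page, err := db.GetIssueMatches(filter, order)
		if err != nil {
			return nil, err
		}
		all = append(all, page...)
		if len(page) < pageSize {
			return all, nil // short or empty page: nothing left after this cursor
		}
		// Each result carries the cursor pointing at itself; assume a nil cursor
		// means no further paging is possible.
		cursor := page[len(page)-1].Cursor()
		if cursor == nil {
			return all, nil
		}
		after = *cursor
	}
}
```

This mirrors the flow exercised by `TestPaginationOfListWithOrder` and the "Using the Cursor on IssueMatches" spec above: the same `order` slice must be reused for every page, since the cursor's WHERE clause is only valid under the ordering it was created with.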