Added principal info cache (#173)

* added principal info cache

* updated code based on the PR comments

* updated implementation of deduplicate()

* minor changes

* [K, T] -> [K, V]
This commit is contained in:
Marko Gaćeša 2023-01-11 20:32:37 +01:00 committed by GitHub
parent d1dd518b3f
commit 24e03c03e0
11 changed files with 620 additions and 141 deletions

View File

@ -7,7 +7,6 @@ package server
import (
"context"
"github.com/harness/gitness/events"
"github.com/harness/gitness/gitrpc"
server2 "github.com/harness/gitness/gitrpc/server"
@ -103,10 +102,12 @@ func initSystem(ctx context.Context, config *types.Config) (*system, error) {
return nil, err
}
repoController := repo.ProvideController(config, provider, checkRepo, authorizer, spaceStore, repoStore, principalStore, gitrpcInterface)
pullReqStore := database.ProvidePullReqStore(db)
pullReqActivityStore := database.ProvidePullReqActivityStore(db)
principalInfoView := database.ProvidePrincipalInfoView(db)
cache := database.ProvidePrincipalInfoCache(principalInfoView)
pullReqStore := database.ProvidePullReqStore(db, cache)
pullReqActivityStore := database.ProvidePullReqActivityStore(db, cache)
pullReqReviewStore := database.ProvidePullReqReviewStore(db)
pullReqReviewerStore := database.ProvidePullReqReviewerStore(db)
pullReqReviewerStore := database.ProvidePullReqReviewerStore(db, cache)
pullreqController := pullreq.ProvideController(db, provider, authorizer, pullReqStore, pullReqActivityStore, pullReqReviewStore, pullReqReviewerStore, repoStore, principalStore, gitrpcInterface)
webhookStore := database.ProvideWebhookStore(db)
webhookExecutionStore := database.ProvideWebhookExecutionStore(db)

View File

@ -7,7 +7,6 @@ package server
import (
"context"
"github.com/harness/gitness/events"
"github.com/harness/gitness/gitrpc"
server2 "github.com/harness/gitness/gitrpc/server"
@ -64,10 +63,12 @@ func initSystem(ctx context.Context, config *types.Config) (*system, error) {
repoController := repo.ProvideController(config, provider, checkRepo, authorizer, spaceStore, repoStore, principalStore, gitrpcInterface)
checkSpace := check.ProvideSpaceCheck()
spaceController := space.ProvideController(provider, checkSpace, authorizer, spaceStore, repoStore, principalStore)
pullReqStore := database.ProvidePullReqStore(db)
pullReqActivityStore := database.ProvidePullReqActivityStore(db)
principalInfoView := database.ProvidePrincipalInfoView(db)
cache := database.ProvidePrincipalInfoCache(principalInfoView)
pullReqStore := database.ProvidePullReqStore(db, cache)
pullReqActivityStore := database.ProvidePullReqActivityStore(db, cache)
pullReqReviewStore := database.ProvidePullReqReviewStore(db)
pullReqReviewerStore := database.ProvidePullReqReviewerStore(db)
pullReqReviewerStore := database.ProvidePullReqReviewerStore(db, cache)
pullreqController := pullreq.ProvideController(db, provider, authorizer, pullReqStore, pullReqActivityStore, pullReqReviewStore, pullReqReviewerStore, repoStore, principalStore, gitrpcInterface)
webhookStore := database.ProvideWebhookStore(db)
webhookExecutionStore := database.ProvideWebhookExecutionStore(db)

203
internal/cache/cache.go vendored Normal file
View File

@ -0,0 +1,203 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package cache
import (
"context"
"fmt"
"sort"
"sync"
"time"
"golang.org/x/exp/constraints"
)
// Cache is a generic cache that stores objects for the specified period.
// The Cache has no maximum capacity, so the idea is to store objects for short period.
// The goal of the Cache is to reduce database load.
// Every instance of Cache has a background routine that purges stale items.
type Cache[K constraints.Ordered, V Identifiable[K]] struct {
	mx        sync.RWMutex           // guards cache and the hit/miss counters
	cache     map[K]cacheEntry[K, V] // cached entries keyed by object ID
	purgeStop chan struct{}          // closed by Stop to terminate the purger goroutine
	getter    Getter[K, V]           // data source consulted on cache misses
	maxAge    time.Duration          // age after which an entry is considered stale
	countHit  int64
	countMiss int64
}

// Identifiable must be implemented by cached values so the Cache
// can derive the map key from the value itself.
type Identifiable[K constraints.Ordered] interface {
	Identifier() K
}

// Getter is the data source backing the Cache: Find loads a single
// object by ID, FindMany loads a batch of objects by their IDs.
type Getter[K constraints.Ordered, V Identifiable[K]] interface {
	Find(ctx context.Context, id K) (V, error)
	FindMany(ctx context.Context, ids []K) ([]V, error)
}

// cacheEntry is a cached value together with the time it was added,
// used to decide when the entry becomes stale.
type cacheEntry[K constraints.Ordered, V Identifiable[K]] struct {
	added time.Time
	data  V
}
// New creates a new Cache instance and a background routine
// that periodically purges stale items.
func New[K constraints.Ordered, V Identifiable[K]](getter Getter[K, V], maxAge time.Duration) *Cache[K, V] {
	instance := &Cache[K, V]{
		cache:     make(map[K]cacheEntry[K, V]),
		purgeStop: make(chan struct{}),
		getter:    getter,
		maxAge:    maxAge,
	}

	// Launch the background purger; it runs until Stop is called.
	go instance.purger()

	return instance
}
// purger periodically evicts stale items from the Cache.
func (c *Cache[K, V]) purger() {
	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()

	for {
		select {
		case <-c.purgeStop:
			// Stop was called - terminate the background routine.
			return
		case now := <-ticker.C:
			c.mx.Lock()
			for id, entry := range c.cache {
				if now.Sub(entry.added) < c.maxAge {
					continue
				}
				delete(c.cache, id)
			}
			c.mx.Unlock()
		}
	}
}
// Stop stops the internal purger of stale elements.
// It must be called once the Cache is no longer needed,
// otherwise the purger goroutine never terminates.
func (c *Cache[K, V]) Stop() {
	close(c.purgeStop)
}
// Stats returns number of cache hits and misses and can be used to monitor the cache efficiency.
func (c *Cache[K, V]) Stats() (int64, int64) {
	// Take the read lock: the counters are mutated concurrently by fetch,
	// so an unsynchronized read would be a data race.
	c.mx.RLock()
	defer c.mx.RUnlock()

	return c.countHit, c.countMiss
}
// fetch returns the cached entry for id if present and still fresh.
// It acquires the full write lock - not just the read lock - because it
// mutates the hit/miss counters; updating them under RLock is a data race
// when multiple readers run concurrently.
func (c *Cache[K, V]) fetch(id K, now time.Time) (V, bool) {
	c.mx.Lock()
	defer c.mx.Unlock()

	item, ok := c.cache[id]
	if !ok || now.Sub(item.added) > c.maxAge {
		c.countMiss++
		var nothing V
		return nothing, false
	}

	c.countHit++

	// we deliberately don't update the `item.added` timestamp for `now` because
	// we want to cache the items only for a short period.
	return item.data, true
}
// Map returns map with all objects requested through the slice of IDs.
// NOTE(review): the input slice is modified in place - deduplicate sorts it
// and cache hits are swap-removed from it below - so callers must not rely
// on the slice's contents or order afterwards.
func (c *Cache[K, V]) Map(ctx context.Context, ids []K) (map[K]V, error) {
	m := make(map[K]V)
	now := time.Now()

	ids = deduplicate(ids)

	// Check what's already available in the cache.
	var idx int
	for idx < len(ids) {
		id := ids[idx]

		item, ok := c.fetch(id, now)
		if !ok {
			// not cached: keep the ID in the list and advance.
			idx++
			continue
		}

		// found in cache: Add to the result map and remove the ID from the list.
		// Removal is done by swapping in the last element and shrinking the
		// slice; idx is deliberately NOT advanced so the swapped-in ID is
		// examined on the next iteration.
		m[id] = item
		ids[idx] = ids[len(ids)-1]
		ids = ids[:len(ids)-1]
	}

	// Everything was served from the cache - no database round trip needed.
	if len(ids) == 0 {
		return m, nil
	}

	// Pull entries from the getter that are not in the cache.
	items, err := c.getter.FindMany(ctx, ids)
	if err != nil {
		return nil, fmt.Errorf("cache: failed to find many: %w", err)
	}

	// Store the freshly loaded entries in the cache and in the result map.
	// IDs the getter did not return are simply absent from the result.
	c.mx.Lock()
	defer c.mx.Unlock()

	for _, item := range items {
		id := item.Identifier()
		m[id] = item
		c.cache[id] = cacheEntry[K, V]{
			added: now,
			data:  item,
		}
	}

	return m, nil
}
// Get returns one object by its ID.
func (c *Cache[K, V]) Get(ctx context.Context, id K) (V, error) {
	now := time.Now()

	// Serve from the cache when possible.
	if cached, ok := c.fetch(id, now); ok {
		return cached, nil
	}

	// Cache miss: load the object from the backing getter.
	fetched, err := c.getter.Find(ctx, id)
	if err != nil {
		var nothing V
		return nothing, fmt.Errorf("cache: failed to find one: %w", err)
	}

	// Remember the freshly loaded object for subsequent lookups.
	c.mx.Lock()
	c.cache[id] = cacheEntry[K, V]{
		added: now,
		data:  fetched,
	}
	c.mx.Unlock()

	return fetched, nil
}
// deduplicate is a utility function that removes duplicates from slice.
// The slice is sorted in place and the returned slice aliases the input.
func deduplicate[V constraints.Ordered](slice []V) []V {
	if len(slice) <= 1 {
		return slice
	}

	sort.Slice(slice, func(i, j int) bool { return slice[i] < slice[j] })

	// Compact in place: last marks the end of the unique prefix.
	last := 0
	for _, v := range slice[1:] {
		if v != slice[last] {
			last++
			slice[last] = v
		}
	}

	return slice[:last+1]
}

59
internal/cache/cache_test.go vendored Normal file
View File

@ -0,0 +1,59 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package cache
import (
"reflect"
"testing"
)
// TestDeduplicate verifies that deduplicate sorts its input and strips
// repeated values.
func TestDeduplicate(t *testing.T) {
	tests := []struct {
		name     string
		input    []int
		expected []int
	}{
		{name: "empty", input: nil, expected: nil},
		{name: "one-element", input: []int{1}, expected: []int{1}},
		{name: "one-element-duplicated", input: []int{1, 1}, expected: []int{1}},
		{name: "two-elements", input: []int{2, 1}, expected: []int{1, 2}},
		{name: "three-elements", input: []int{2, 2, 3, 3, 1, 1}, expected: []int{1, 2, 3}},
		{name: "many-elements", input: []int{2, 5, 1, 2, 3, 3, 4, 5, 1, 1}, expected: []int{1, 2, 3, 4, 5}},
	}
	for _, tc := range tests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got := deduplicate(tc.input)
			if !reflect.DeepEqual(tc.expected, got) {
				t.Errorf("failed - want=%v, got=%v", tc.expected, got)
			}
		})
	}
}

View File

@ -0,0 +1,92 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package database
import (
"context"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/database/dbtx"
"github.com/harness/gitness/types"
"github.com/Masterminds/squirrel"
"github.com/jmoiron/sqlx"
)
// Compile-time check: *PrincipalInfoView must satisfy store.PrincipalInfoView.
var _ store.PrincipalInfoView = (*PrincipalInfoView)(nil)

// NewPrincipalInfoStore returns a new PrincipalInfoView.
// It's used by the principal info cache.
func NewPrincipalInfoStore(db *sqlx.DB) *PrincipalInfoView {
	return &PrincipalInfoView{
		db: db,
	}
}

// PrincipalInfoView is a read-only accessor for principal info objects
// backed by the `principals` database table.
type PrincipalInfoView struct {
	db *sqlx.DB
}
// Find returns a single principal info object by id from the `principals` database table.
func (s *PrincipalInfoView) Find(ctx context.Context, id int64) (*types.PrincipalInfo, error) {
	const sqlQuery = `
	SELECT principal_uid, principal_email, principal_display_name
	FROM principals
	WHERE principal_id = $1`

	db := dbtx.GetAccessor(ctx, s.db)

	// Row.Err surfaces failures that occurred before scanning
	// (e.g. a cancelled context or a broken connection).
	v := db.QueryRowContext(ctx, sqlQuery, id)
	if err := v.Err(); err != nil {
		return nil, processSQLErrorf(err, "failed to find principal info")
	}

	// The ID is not part of the SELECT list - it is already known.
	info := &types.PrincipalInfo{
		ID: id,
	}

	if err := v.Scan(&info.UID, &info.Email, &info.DisplayName); err != nil {
		return nil, processSQLErrorf(err, "failed to scan principal info")
	}

	return info, nil
}
// FindMany returns several principal info objects by id from the `principals` database table.
// IDs that do not exist are silently absent from the result, so the returned
// slice may be shorter than the input.
func (s *PrincipalInfoView) FindMany(ctx context.Context, ids []int64) ([]*types.PrincipalInfo, error) {
	db := dbtx.GetAccessor(ctx, s.db)

	stmt := builder.
		Select("principal_id, principal_uid, principal_email, principal_display_name").
		From("principals").
		Where(squirrel.Eq{"principal_id": ids})

	sqlQuery, params, err := stmt.ToSql()
	if err != nil {
		return nil, processSQLErrorf(err, "failed to generate find many principal info SQL query")
	}

	rows, err := db.QueryContext(ctx, sqlQuery, params...)
	if err != nil {
		return nil, processSQLErrorf(err, "failed to query find many principal info")
	}
	defer func() {
		_ = rows.Close()
	}()

	result := make([]*types.PrincipalInfo, 0, len(ids))

	for rows.Next() {
		info := &types.PrincipalInfo{}
		err = rows.Scan(&info.ID, &info.UID, &info.Email, &info.DisplayName)
		if err != nil {
			return nil, processSQLErrorf(err, "failed to scan principal info")
		}
		result = append(result, info)
	}

	// rows.Next returning false can hide a query/connection failure;
	// surface any error encountered during iteration instead of silently
	// returning a truncated result.
	if err = rows.Err(); err != nil {
		return nil, processSQLErrorf(err, "failed to read principal info rows")
	}

	return result, nil
}

View File

@ -6,9 +6,11 @@ package database
import (
"context"
"fmt"
"strings"
"time"
"github.com/harness/gitness/internal/cache"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/database/dbtx"
"github.com/harness/gitness/types"
@ -18,20 +20,24 @@ import (
"github.com/guregu/null"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
var _ store.PullReqStore = (*PullReqStore)(nil)
// NewPullReqStore returns a new PullReqStore.
func NewPullReqStore(db *sqlx.DB) *PullReqStore {
func NewPullReqStore(db *sqlx.DB,
pCache *cache.Cache[int64, *types.PrincipalInfo]) *PullReqStore {
return &PullReqStore{
db: db,
db: db,
pCache: pCache,
}
}
// PullReqStore implements store.PullReqStore backed by a relational database.
type PullReqStore struct {
db *sqlx.DB
db *sqlx.DB
pCache *cache.Cache[int64, *types.PrincipalInfo]
}
// pullReq is used to fetch pull request data from the database.
@ -61,13 +67,6 @@ type pullReq struct {
MergedBy null.Int `db:"pullreq_merged_by"`
Merged null.Int `db:"pullreq_merged"`
MergeStrategy null.String `db:"pullreq_merge_strategy"`
AuthorUID string `db:"author_uid"`
AuthorName string `db:"author_name"`
AuthorEmail string `db:"author_email"`
MergerUID null.String `db:"merger_uid"`
MergerName null.String `db:"merger_name"`
MergerEmail null.String `db:"merger_email"`
}
const (
@ -89,19 +88,11 @@ const (
,pullreq_activity_seq
,pullreq_merged_by
,pullreq_merged
,pullreq_merge_strategy
,author.principal_uid as "author_uid"
,author.principal_display_name as "author_name"
,author.principal_email as "author_email"
,merger.principal_uid as "merger_uid"
,merger.principal_display_name as "merger_name"
,merger.principal_email as "merger_email"`
,pullreq_merge_strategy`
pullReqSelectBase = `
SELECT` + pullReqColumns + `
FROM pullreqs
INNER JOIN principals author on author.principal_id = pullreq_created_by
LEFT JOIN principals merger on merger.principal_id = pullreq_merged_by`
FROM pullreqs`
)
// Find finds the pull request by id.
@ -116,7 +107,7 @@ func (s *PullReqStore) Find(ctx context.Context, id int64) (*types.PullReq, erro
return nil, processSQLErrorf(err, "Failed to find pull request")
}
return mapPullReq(dst), nil
return s.mapPullReq(ctx, dst), nil
}
// FindByNumber finds the pull request by repo ID and pull request number.
@ -140,7 +131,7 @@ func (s *PullReqStore) FindByNumberWithLock(
return nil, processSQLErrorf(err, "Failed to find pull request by number")
}
return mapPullReq(dst), nil
return s.mapPullReq(ctx, dst), nil
}
// FindByNumber finds the pull request by repo ID and pull request number.
@ -395,11 +386,16 @@ func (s *PullReqStore) List(ctx context.Context, repoID int64, opts *types.PullR
return nil, processSQLErrorf(err, "Failed executing custom list query")
}
return mapSlicePullReq(dst), nil
result, err := s.mapSlicePullReq(ctx, dst)
if err != nil {
return nil, err
}
return result, nil
}
func mapPullReq(pr *pullReq) *types.PullReq {
m := &types.PullReq{
return &types.PullReq{
ID: pr.ID,
Version: pr.Version,
Number: pr.Number,
@ -421,22 +417,6 @@ func mapPullReq(pr *pullReq) *types.PullReq {
Author: types.PrincipalInfo{},
Merger: nil,
}
m.Author = types.PrincipalInfo{
ID: pr.CreatedBy,
UID: pr.AuthorUID,
DisplayName: pr.AuthorName,
Email: pr.AuthorEmail,
}
if pr.MergedBy.Valid {
m.Merger = &types.PrincipalInfo{
ID: pr.MergedBy.Int64,
UID: pr.MergerUID.String,
DisplayName: pr.MergerName.String,
Email: pr.MergerEmail.String,
}
}
return m
}
func mapInternalPullReq(pr *types.PullReq) *pullReq {
@ -464,10 +444,60 @@ func mapInternalPullReq(pr *types.PullReq) *pullReq {
return m
}
func mapSlicePullReq(prs []*pullReq) []*types.PullReq {
// mapPullReq converts a DB pull request row to the API type and resolves
// the author and merger principal infos through the cache.
// Principal lookup failures are logged but do not fail the mapping -
// the returned PR then carries an empty Author / nil Merger.
func (s *PullReqStore) mapPullReq(ctx context.Context, pr *pullReq) *types.PullReq {
	m := mapPullReq(pr)

	var author, merger *types.PrincipalInfo
	var err error

	author, err = s.pCache.Get(ctx, pr.CreatedBy)
	if err != nil {
		log.Err(err).Msg("failed to load PR author")
	}
	if author != nil {
		m.Author = *author
	}

	// The merger is optional - only set when the PR has been merged.
	if pr.MergedBy.Valid {
		merger, err = s.pCache.Get(ctx, pr.MergedBy.Int64)
		if err != nil {
			log.Err(err).Msg("failed to load PR merger")
		}
		m.Merger = merger
	}

	return m
}
// mapSlicePullReq converts a slice of DB pull request rows to API types,
// batch-loading all referenced principal infos through the cache.
func (s *PullReqStore) mapSlicePullReq(ctx context.Context, prs []*pullReq) ([]*types.PullReq, error) {
	// collect all principal IDs
	ids := make([]int64, 0, 2*len(prs))
	for _, pr := range prs {
		ids = append(ids, pr.CreatedBy)
		if pr.MergedBy.Valid {
			ids = append(ids, pr.MergedBy.Int64)
		}
	}

	// pull principal infos from cache
	infoMap, err := s.pCache.Map(ctx, ids)
	if err != nil {
		return nil, fmt.Errorf("failed to load PR principal infos: %w", err)
	}

	// attach the principal infos back to the slice items
	m := make([]*types.PullReq, len(prs))
	for i, pr := range prs {
		m[i] = mapPullReq(pr)
		if author, ok := infoMap[pr.CreatedBy]; ok {
			m[i].Author = *author
		}
		if pr.MergedBy.Valid {
			if merger, ok := infoMap[pr.MergedBy.Int64]; ok {
				m[i].Merger = merger
			}
		}
	}

	return m, nil
}

View File

@ -7,8 +7,10 @@ package database
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/harness/gitness/internal/cache"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/database/dbtx"
"github.com/harness/gitness/types"
@ -18,20 +20,24 @@ import (
"github.com/guregu/null"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
var _ store.PullReqActivityStore = (*PullReqActivityStore)(nil)
// NewPullReqActivityStore returns a new PullReqJournalStore.
func NewPullReqActivityStore(db *sqlx.DB) *PullReqActivityStore {
func NewPullReqActivityStore(db *sqlx.DB,
pCache *cache.Cache[int64, *types.PrincipalInfo]) *PullReqActivityStore {
return &PullReqActivityStore{
db: db,
db: db,
pCache: pCache,
}
}
// PullReqActivityStore implements store.PullReqActivityStore backed by a relational database.
type PullReqActivityStore struct {
db *sqlx.DB
db *sqlx.DB
pCache *cache.Cache[int64, *types.PrincipalInfo]
}
// journal is used to fetch pull request data from the database.
@ -63,13 +69,6 @@ type pullReqActivity struct {
ResolvedBy null.Int `db:"pullreq_activity_resolved_by"`
Resolved null.Int `db:"pullreq_activity_resolved"`
AuthorUID string `db:"author_uid"`
AuthorName string `db:"author_name"`
AuthorEmail string `db:"author_email"`
ResolverUID null.String `db:"resolver_uid"`
ResolverName null.String `db:"resolver_name"`
ResolverEmail null.String `db:"resolver_email"`
}
const (
@ -93,19 +92,11 @@ const (
,pullreq_activity_payload
,pullreq_activity_metadata
,pullreq_activity_resolved_by
,pullreq_activity_resolved
,author.principal_uid as "author_uid"
,author.principal_display_name as "author_name"
,author.principal_email as "author_email"
,resolver.principal_uid as "resolver_uid"
,resolver.principal_display_name as "resolver_name"
,resolver.principal_email as "resolver_email"`
,pullreq_activity_resolved`
pullreqActivitySelectBase = `
SELECT` + pullreqActivityColumns + `
FROM pullreq_activities
INNER JOIN principals author on author.principal_id = pullreq_activity_created_by
LEFT JOIN principals resolver on resolver.principal_id = pullreq_activity_resolved_by`
FROM pullreq_activities`
)
// Find finds the pull request activity by id.
@ -120,7 +111,7 @@ func (s *PullReqActivityStore) Find(ctx context.Context, id int64) (*types.PullR
return nil, processSQLErrorf(err, "Failed to find pull request activity")
}
return mapPullReqActivity(dst), nil
return s.mapPullReqActivity(ctx, dst), nil
}
// Create creates a new pull request.
@ -347,7 +338,12 @@ func (s *PullReqActivityStore) List(ctx context.Context, prID int64,
return nil, processSQLErrorf(err, "Failed executing pull request activity list query")
}
return mapSlicePullReqActivity(dst), nil
result, err := s.mapSlicePullReqActivity(ctx, dst)
if err != nil {
return nil, err
}
return result, nil
}
func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity {
@ -375,25 +371,10 @@ func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity {
Author: types.PrincipalInfo{},
Resolver: nil,
}
m.Author = types.PrincipalInfo{
ID: act.CreatedBy,
UID: act.AuthorUID,
DisplayName: act.AuthorName,
Email: act.AuthorEmail,
}
_ = json.Unmarshal(act.Payload, &m.Payload)
_ = json.Unmarshal(act.Metadata, &m.Metadata)
if act.ResolvedBy.Valid {
m.Resolver = &types.PrincipalInfo{
ID: act.ResolvedBy.Int64,
UID: act.ResolverUID.String,
DisplayName: act.ResolverName.String,
Email: act.ResolverEmail.String,
}
}
return m
}
@ -427,10 +408,61 @@ func mapInternalPullReqActivity(act *types.PullReqActivity) *pullReqActivity {
return m
}
func mapSlicePullReqActivity(a []*pullReqActivity) []*types.PullReqActivity {
m := make([]*types.PullReqActivity, len(a))
for i, act := range a {
m[i] = mapPullReqActivity(act)
// mapPullReqActivity converts a DB activity row to the API type and resolves
// the author and resolver principal infos through the cache.
// Principal lookup failures are logged but do not fail the mapping -
// the returned activity then carries an empty Author / nil Resolver.
func (s *PullReqActivityStore) mapPullReqActivity(ctx context.Context, act *pullReqActivity) *types.PullReqActivity {
	m := mapPullReqActivity(act)

	var author, resolver *types.PrincipalInfo
	var err error

	author, err = s.pCache.Get(ctx, act.CreatedBy)
	if err != nil {
		log.Err(err).Msg("failed to load PR activity author")
	}
	if author != nil {
		m.Author = *author
	}

	// The resolver is optional - only set when the activity has been resolved.
	if act.ResolvedBy.Valid {
		resolver, err = s.pCache.Get(ctx, act.ResolvedBy.Int64)
		if err != nil {
			log.Err(err).Msg("failed to load PR activity resolver")
		}
		m.Resolver = resolver
	}

	return m
}
// mapSlicePullReqActivity converts a slice of DB activity rows to API types,
// batch-loading all referenced principal infos through the cache.
func (s *PullReqActivityStore) mapSlicePullReqActivity(ctx context.Context,
	activities []*pullReqActivity) ([]*types.PullReqActivity, error) {
	// collect all principal IDs
	ids := make([]int64, 0, 2*len(activities))
	for _, act := range activities {
		ids = append(ids, act.CreatedBy)
		if act.ResolvedBy.Valid {
			// BUGFIX: this previously appended act.Resolved.Int64 (the
			// resolution timestamp) instead of the resolver's principal ID,
			// so the resolver info was never found in infoMap below.
			ids = append(ids, act.ResolvedBy.Int64)
		}
	}

	// pull principal infos from cache
	infoMap, err := s.pCache.Map(ctx, ids)
	if err != nil {
		return nil, fmt.Errorf("failed to load PR principal infos: %w", err)
	}

	// attach the principal infos back to the slice items
	m := make([]*types.PullReqActivity, len(activities))
	for i, act := range activities {
		m[i] = mapPullReqActivity(act)
		if author, ok := infoMap[act.CreatedBy]; ok {
			m[i].Author = *author
		}
		if act.ResolvedBy.Valid {
			if resolver, ok := infoMap[act.ResolvedBy.Int64]; ok {
				m[i].Resolver = resolver
			}
		}
	}

	return m, nil
}

View File

@ -6,8 +6,10 @@ package database
import (
"context"
"fmt"
"time"
"github.com/harness/gitness/internal/cache"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/database/dbtx"
"github.com/harness/gitness/types"
@ -16,6 +18,7 @@ import (
"github.com/guregu/null"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
var _ store.PullReqReviewerStore = (*PullReqReviewerStore)(nil)
@ -23,15 +26,18 @@ var _ store.PullReqReviewerStore = (*PullReqReviewerStore)(nil)
const maxPullRequestReviewers = 100
// NewPullReqReviewerStore returns a new PullReqReviewerStore.
func NewPullReqReviewerStore(db *sqlx.DB) *PullReqReviewerStore {
func NewPullReqReviewerStore(db *sqlx.DB,
pCache *cache.Cache[int64, *types.PrincipalInfo]) *PullReqReviewerStore {
return &PullReqReviewerStore{
db: db,
db: db,
pCache: pCache,
}
}
// PullReqReviewerStore implements store.PullReqReviewerStore backed by a relational database.
type PullReqReviewerStore struct {
db *sqlx.DB
db *sqlx.DB
pCache *cache.Cache[int64, *types.PrincipalInfo]
}
// pullReqReviewer is used to fetch pull request reviewer data from the database.
@ -48,13 +54,6 @@ type pullReqReviewer struct {
ReviewDecision enum.PullReqReviewDecision `db:"pullreq_reviewer_review_decision"`
SHA string `db:"pullreq_reviewer_sha"`
ReviewerUID string `db:"reviewer_uid"`
ReviewerName string `db:"reviewer_name"`
ReviewerEmail string `db:"reviewer_email"`
AddedByUID string `db:"added_by_uid"`
AddedByName string `db:"added_by_name"`
AddedByEmail string `db:"added_by_email"`
}
const (
@ -68,19 +67,11 @@ const (
,pullreq_reviewer_type
,pullreq_reviewer_latest_review_id
,pullreq_reviewer_review_decision
,pullreq_reviewer_sha
,reviewer.principal_uid as "reviewer_uid"
,reviewer.principal_display_name as "reviewer_name"
,reviewer.principal_email as "reviewer_email"
,added_by.principal_uid as "added_by_uid"
,added_by.principal_display_name as "added_by_name"
,added_by.principal_email as "added_by_email"`
,pullreq_reviewer_sha`
pullreqReviewerSelectBase = `
SELECT` + pullreqReviewerColumns + `
FROM pullreq_reviewers
INNER JOIN principals reviewer on reviewer.principal_id = pullreq_reviewer_principal_id
INNER JOIN principals added_by on added_by.principal_id = pullreq_reviewer_created_by`
FROM pullreq_reviewers`
)
// Find finds the pull request reviewer by pull request id and principal id.
@ -95,7 +86,7 @@ func (s *PullReqReviewerStore) Find(ctx context.Context, prID, principalID int64
return nil, processSQLErrorf(err, "Failed to find pull request reviewer")
}
return mapPullReqReviewer(dst), nil
return s.mapPullReqReviewer(ctx, dst), nil
}
// Create creates a new pull request reviewer.
@ -197,7 +188,12 @@ func (s *PullReqReviewerStore) List(ctx context.Context, prID int64) ([]*types.P
return nil, processSQLErrorf(err, "Failed executing pull request reviewer list query")
}
return mapSlicePullReqReviewer(dst), nil
result, err := s.mapSlicePullReqReviewer(ctx, dst)
if err != nil {
return nil, err
}
return result, nil
}
func mapPullReqReviewer(v *pullReqReviewer) *types.PullReqReviewer {
@ -212,18 +208,6 @@ func mapPullReqReviewer(v *pullReqReviewer) *types.PullReqReviewer {
LatestReviewID: v.LatestReviewID.Ptr(),
ReviewDecision: v.ReviewDecision,
SHA: v.SHA,
Reviewer: types.PrincipalInfo{
ID: v.PrincipalID,
UID: v.ReviewerUID,
DisplayName: v.ReviewerName,
Email: v.ReviewerEmail,
},
AddedBy: types.PrincipalInfo{
ID: v.CreatedBy,
UID: v.AddedByUID,
DisplayName: v.AddedByName,
Email: v.AddedByEmail,
},
}
return m
}
@ -240,20 +224,69 @@ func mapInternalPullReqReviewer(v *types.PullReqReviewer) *pullReqReviewer {
LatestReviewID: null.IntFromPtr(v.LatestReviewID),
ReviewDecision: v.ReviewDecision,
SHA: v.SHA,
ReviewerUID: "",
ReviewerName: "",
ReviewerEmail: "",
AddedByUID: "",
AddedByName: "",
AddedByEmail: "",
}
return m
}
func mapSlicePullReqReviewer(a []*pullReqReviewer) []*types.PullReqReviewer {
m := make([]*types.PullReqReviewer, len(a))
for i, act := range a {
m[i] = mapPullReqReviewer(act)
// mapPullReqReviewer converts a DB reviewer row to the API type and resolves
// the reviewer and added-by principal infos through the cache.
// Principal lookup failures are logged but do not fail the mapping.
func (s *PullReqReviewerStore) mapPullReqReviewer(ctx context.Context, v *pullReqReviewer) *types.PullReqReviewer {
	// Reuse the plain row mapper instead of duplicating the field-by-field
	// copy; this keeps the method consistent with the equivalent methods of
	// PullReqStore and PullReqActivityStore.
	m := mapPullReqReviewer(v)

	addedBy, err := s.pCache.Get(ctx, v.CreatedBy)
	if err != nil {
		log.Err(err).Msg("failed to load PR reviewer addedBy")
	}
	if addedBy != nil {
		m.AddedBy = *addedBy
	}

	reviewer, err := s.pCache.Get(ctx, v.PrincipalID)
	if err != nil {
		log.Err(err).Msg("failed to load PR reviewer principal")
	}
	if reviewer != nil {
		m.Reviewer = *reviewer
	}

	return m
}
// mapSlicePullReqReviewer converts a slice of DB reviewer rows to API types,
// batch-loading all referenced principal infos through the cache.
func (s *PullReqReviewerStore) mapSlicePullReqReviewer(ctx context.Context,
	reviewers []*pullReqReviewer) ([]*types.PullReqReviewer, error) {
	// Gather the IDs of every principal referenced by the rows.
	ids := make([]int64, 0, 2*len(reviewers))
	for _, v := range reviewers {
		ids = append(ids, v.CreatedBy, v.PrincipalID)
	}

	// Batch-load the principal infos through the cache.
	infoMap, err := s.pCache.Map(ctx, ids)
	if err != nil {
		return nil, fmt.Errorf("failed to load PR principal infos: %w", err)
	}

	// Map each row and attach its principal infos.
	result := make([]*types.PullReqReviewer, len(reviewers))
	for i, v := range reviewers {
		reviewer := mapPullReqReviewer(v)
		if addedBy, ok := infoMap[v.CreatedBy]; ok {
			reviewer.AddedBy = *addedBy
		}
		if principal, ok := infoMap[v.PrincipalID]; ok {
			reviewer.Reviewer = *principal
		}
		result[i] = reviewer
	}

	return result, nil
}

View File

@ -6,7 +6,9 @@ package database
import (
"context"
"time"
"github.com/harness/gitness/internal/cache"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/types"
@ -22,6 +24,8 @@ const (
var WireSet = wire.NewSet(
ProvideDatabase,
ProvidePrincipalStore,
ProvidePrincipalInfoView,
ProvidePrincipalInfoCache,
ProvideSpaceStore,
ProvideRepoStore,
ProvideTokenStore,
@ -47,6 +51,16 @@ func ProvidePrincipalStore(db *sqlx.DB, uidTransformation store.PrincipalUIDTran
return NewPrincipalStore(db, uidTransformation)
}
// ProvidePrincipalInfoView provides a principal info store.
func ProvidePrincipalInfoView(db *sqlx.DB) store.PrincipalInfoView {
	return NewPrincipalInfoStore(db)
}

// ProvidePrincipalInfoCache provides a cache for storing types.PrincipalInfo objects.
// Cached entries are considered stale after 30 seconds.
func ProvidePrincipalInfoCache(getter store.PrincipalInfoView) *cache.Cache[int64, *types.PrincipalInfo] {
	return cache.New[int64, *types.PrincipalInfo](getter, 30*time.Second)
}
// ProvideSpaceStore provides a space store.
func ProvideSpaceStore(db *sqlx.DB, pathTransformation store.PathTransformation) store.SpaceStore {
switch db.DriverName() {
@ -77,13 +91,15 @@ func ProvideTokenStore(db *sqlx.DB) store.TokenStore {
}
// ProvidePullReqStore provides a pull request store.
func ProvidePullReqStore(db *sqlx.DB) store.PullReqStore {
return NewPullReqStore(db)
func ProvidePullReqStore(db *sqlx.DB,
principalInfoCache *cache.Cache[int64, *types.PrincipalInfo]) store.PullReqStore {
return NewPullReqStore(db, principalInfoCache)
}
// ProvidePullReqActivityStore provides a pull request activity store.
func ProvidePullReqActivityStore(db *sqlx.DB) store.PullReqActivityStore {
return NewPullReqActivityStore(db)
func ProvidePullReqActivityStore(db *sqlx.DB,
principalInfoCache *cache.Cache[int64, *types.PrincipalInfo]) store.PullReqActivityStore {
return NewPullReqActivityStore(db, principalInfoCache)
}
// ProvidePullReqReviewStore provides a pull request review store.
@ -92,8 +108,9 @@ func ProvidePullReqReviewStore(db *sqlx.DB) store.PullReqReviewStore {
}
// ProvidePullReqReviewerStore provides a pull request reviewer store.
func ProvidePullReqReviewerStore(db *sqlx.DB) store.PullReqReviewerStore {
return NewPullReqReviewerStore(db)
func ProvidePullReqReviewerStore(db *sqlx.DB,
principalInfoCache *cache.Cache[int64, *types.PrincipalInfo]) store.PullReqReviewerStore {
return NewPullReqReviewerStore(db, principalInfoCache)
}
// ProvideWebhookStore provides a webhook store.

View File

@ -109,6 +109,13 @@ type (
CountServices(ctx context.Context) (int64, error)
}
// PrincipalInfoView defines helper utility for fetching types.PrincipalInfo objects.
// It uses the same underlying data storage as PrincipalStore.
PrincipalInfoView interface {
Find(ctx context.Context, id int64) (*types.PrincipalInfo, error)
FindMany(ctx context.Context, ids []int64) ([]*types.PrincipalInfo, error)
}
// SpaceStore defines the space data storage.
SpaceStore interface {
// Find the space by id.

View File

@ -44,3 +44,7 @@ func (p *Principal) ToPrincipalInfo() *PrincipalInfo {
Email: p.Email,
}
}
// Identifier returns the principal's ID.
// It makes *PrincipalInfo satisfy the cache Identifiable interface so the
// type can be stored in the principal info cache.
func (p *PrincipalInfo) Identifier() int64 {
	return p.ID
}