add optional execution information in list pipelines (#384)

This commit is contained in:
Vistaar Juneja 2023-08-31 19:26:53 +00:00 committed by Harness
parent 7e8385c27d
commit c97d4783a9
11 changed files with 245 additions and 26 deletions

View File

@ -19,6 +19,7 @@ func (c *Controller) ListPipelines(
ctx context.Context, ctx context.Context,
session *auth.Session, session *auth.Session,
repoRef string, repoRef string,
latest bool,
filter types.ListQueryFilter, filter types.ListQueryFilter,
) ([]*types.Pipeline, int64, error) { ) ([]*types.Pipeline, int64, error) {
repo, err := c.repoStore.FindByRef(ctx, repoRef) repo, err := c.repoStore.FindByRef(ctx, repoRef)
@ -40,10 +41,18 @@ func (c *Controller) ListPipelines(
return fmt.Errorf("failed to count child executions: %w", err) return fmt.Errorf("failed to count child executions: %w", err)
} }
pipelines, err = c.pipelineStore.List(ctx, repo.ID, filter) if !latest {
if err != nil { pipelines, err = c.pipelineStore.List(ctx, repo.ID, filter)
return fmt.Errorf("failed to count child executions: %w", err) if err != nil {
return fmt.Errorf("failed to list pipelines: %w", err)
}
} else {
pipelines, err = c.pipelineStore.ListLatest(ctx, repo.ID, filter)
if err != nil {
return fmt.Errorf("failed to list latest pipelines: %w", err)
}
} }
return return
}, dbtx.TxDefaultReadOnly) }, dbtx.TxDefaultReadOnly)
if err != nil { if err != nil {

View File

@ -23,7 +23,8 @@ func HandleListPipelines(repoCtrl *repo.Controller) http.HandlerFunc {
} }
filter := request.ParseListQueryFilterFromRequest(r) filter := request.ParseListQueryFilterFromRequest(r)
repos, totalCount, err := repoCtrl.ListPipelines(ctx, session, repoRef, filter) latest := request.GetLatestFromPath(r)
repos, totalCount, err := repoCtrl.ListPipelines(ctx, session, repoRef, latest, filter)
if err != nil { if err != nil {
render.TranslatedUserError(w, err) render.TranslatedUserError(w, err)
return return

View File

@ -7,9 +7,11 @@ package openapi
import ( import (
"net/http" "net/http"
"github.com/gotidy/ptr"
"github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/controller/execution"
"github.com/harness/gitness/internal/api/controller/pipeline" "github.com/harness/gitness/internal/api/controller/pipeline"
"github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/controller/trigger"
"github.com/harness/gitness/internal/api/request"
"github.com/harness/gitness/internal/api/usererror" "github.com/harness/gitness/internal/api/usererror"
"github.com/harness/gitness/types" "github.com/harness/gitness/types"
@ -79,6 +81,20 @@ type updatePipelineRequest struct {
pipeline.UpdateInput pipeline.UpdateInput
} }
// queryParameterLatest describes the optional "latest" boolean query
// parameter of the list-pipelines endpoint. When set to true, the
// response includes latest build information for each pipeline.
var queryParameterLatest = openapi3.ParameterOrRef{
	Parameter: &openapi3.Parameter{
		Name:        request.QueryParamLatest,
		In:          openapi3.ParameterInQuery,
		Description: ptr.String("Whether to fetch latest build information for each pipeline."),
		Required:    ptr.Bool(false),
		Schema: &openapi3.SchemaOrRef{
			Schema: &openapi3.Schema{
				Type: ptrSchemaType(openapi3.SchemaTypeBoolean),
			},
		},
	},
}
func pipelineOperations(reflector *openapi3.Reflector) { func pipelineOperations(reflector *openapi3.Reflector) {
opCreate := openapi3.Operation{} opCreate := openapi3.Operation{}
opCreate.WithTags("pipeline") opCreate.WithTags("pipeline")
@ -92,9 +108,9 @@ func pipelineOperations(reflector *openapi3.Reflector) {
_ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pipelines", opCreate) _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pipelines", opCreate)
opPipelines := openapi3.Operation{} opPipelines := openapi3.Operation{}
opPipelines.WithTags("repos") opPipelines.WithTags("pipeline")
opPipelines.WithMapOfAnything(map[string]interface{}{"operationId": "listPipelines"}) opPipelines.WithMapOfAnything(map[string]interface{}{"operationId": "listPipelines"})
opPipelines.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) opPipelines.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit, queryParameterLatest)
_ = reflector.SetRequest(&opPipelines, new(repoRequest), http.MethodGet) _ = reflector.SetRequest(&opPipelines, new(repoRequest), http.MethodGet)
_ = reflector.SetJSONResponse(&opPipelines, []types.Pipeline{}, http.StatusOK) _ = reflector.SetJSONResponse(&opPipelines, []types.Pipeline{}, http.StatusOK)
_ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusInternalServerError) _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusInternalServerError)

View File

@ -15,6 +15,7 @@ const (
PathParamStageNumber = "stage_number" PathParamStageNumber = "stage_number"
PathParamStepNumber = "step_number" PathParamStepNumber = "step_number"
PathParamTriggerUID = "trigger_uid" PathParamTriggerUID = "trigger_uid"
QueryParamLatest = "latest"
) )
func GetPipelineUIDFromPath(r *http.Request) (string, error) { func GetPipelineUIDFromPath(r *http.Request) (string, error) {
@ -39,6 +40,14 @@ func GetStepNumberFromPath(r *http.Request) (int64, error) {
return PathParamAsPositiveInt64(r, PathParamStepNumber) return PathParamAsPositiveInt64(r, PathParamStepNumber)
} }
// GetLatestFromPath returns the value of the optional "latest" query
// parameter. It is true only for the exact value "true"; any other
// value, or the parameter being absent, yields false.
//
// NOTE(review): despite the name, this reads a query parameter, not a
// path parameter; renaming would break callers, so it is kept as-is.
func GetLatestFromPath(r *http.Request) bool {
	// The second return value of QueryParam is deliberately ignored:
	// a missing parameter simply maps to false.
	v, _ := QueryParam(r, QueryParamLatest)
	return v == "true"
}
func GetTriggerUIDFromPath(r *http.Request) (string, error) { func GetTriggerUIDFromPath(r *http.Request) (string, error) {
rawRef, err := PathParamOrError(r, PathParamTriggerUID) rawRef, err := PathParamOrError(r, PathParamTriggerUID)
if err != nil { if err != nil {

View File

@ -490,8 +490,12 @@ type (
// Update tries to update a pipeline in the datastore // Update tries to update a pipeline in the datastore
Update(ctx context.Context, pipeline *types.Pipeline) error Update(ctx context.Context, pipeline *types.Pipeline) error
// List lists the pipelines present in a parent space ID in the datastore. // List lists the pipelines present in a repository in the datastore.
List(ctx context.Context, spaceID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) List(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error)
// ListLatest lists the pipelines present in a repository in the datastore.
// It also returns latest build information for all the returned entries.
ListLatest(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error)
// UpdateOptLock updates the pipeline using the optimistic locking mechanism. // UpdateOptLock updates the pipeline using the optimistic locking mechanism.
UpdateOptLock(ctx context.Context, pipeline *types.Pipeline, UpdateOptLock(ctx context.Context, pipeline *types.Pipeline,
@ -500,11 +504,11 @@ type (
// Delete deletes a pipeline ID from the datastore. // Delete deletes a pipeline ID from the datastore.
Delete(ctx context.Context, id int64) error Delete(ctx context.Context, id int64) error
// Count the number of pipelines in a space matching the given filter. // Count the number of pipelines in a repository matching the given filter.
Count(ctx context.Context, spaceID int64, filter types.ListQueryFilter) (int64, error) Count(ctx context.Context, repoID int64, filter types.ListQueryFilter) (int64, error)
// DeleteByUID deletes a pipeline with a given UID in a space // DeleteByUID deletes a pipeline with a given UID under a repo.
DeleteByUID(ctx context.Context, spaceID int64, uid string) error DeleteByUID(ctx context.Context, repoID int64, uid string) error
// IncrementSeqNum increments the sequence number of the pipeline // IncrementSeqNum increments the sequence number of the pipeline
IncrementSeqNum(ctx context.Context, pipeline *types.Pipeline) (*types.Pipeline, error) IncrementSeqNum(ctx context.Context, pipeline *types.Pipeline) (*types.Pipeline, error)

View File

@ -14,6 +14,7 @@ import (
"github.com/harness/gitness/store/database" "github.com/harness/gitness/store/database"
"github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/store/database/dbtx"
"github.com/harness/gitness/types" "github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
sqlxtypes "github.com/jmoiron/sqlx/types" sqlxtypes "github.com/jmoiron/sqlx/types"
@ -294,6 +295,7 @@ func (s *executionStore) UpdateOptLock(ctx context.Context,
} }
// List lists the executions for a given pipeline ID. // List lists the executions for a given pipeline ID.
// It orders them in descending order of execution number.
func (s *executionStore) List( func (s *executionStore) List(
ctx context.Context, ctx context.Context,
pipelineID int64, pipelineID int64,
@ -302,7 +304,8 @@ func (s *executionStore) List(
stmt := database.Builder. stmt := database.Builder.
Select(executionColumns). Select(executionColumns).
From("executions"). From("executions").
Where("execution_pipeline_id = ?", fmt.Sprint(pipelineID)) Where("execution_pipeline_id = ?", fmt.Sprint(pipelineID)).
OrderBy("execution_number " + enum.OrderDesc.String())
stmt = stmt.Limit(database.Limit(pagination.Size)) stmt = stmt.Limit(database.Limit(pagination.Size))
stmt = stmt.Offset(database.Offset(pagination.Page, pagination.Size)) stmt = stmt.Offset(database.Offset(pagination.Page, pagination.Size))

View File

@ -1,14 +1,12 @@
package database package database
import ( import (
"encoding/json"
"github.com/harness/gitness/types" "github.com/harness/gitness/types"
) )
func mapInternalToExecution(in *execution) (*types.Execution, error) { func mapInternalToExecution(in *execution) (*types.Execution, error) {
var params map[string]string var params map[string]string
err := json.Unmarshal(in.Params, &params) err := in.Params.Unmarshal(&params)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -207,6 +207,15 @@ INSERT INTO pipelines (
'develop', 'config_path_2', 1678932200, 1678932300, 1 'develop', 'config_path_2', 1678932200, 1678932300, 1
); );
-- Pipeline 3: appears to have no execution rows in this fixture
-- (verify against the executions inserted below), which exercises the
-- LEFT JOIN path of ListLatest where no latest-build data exists.
-- NOTE(review): pipeline_created/pipeline_updated here are millisecond
-- epochs (1678932200000) while pipelines 1 and 2 above use second
-- epochs (1678932200) — confirm which unit the store expects.
INSERT INTO pipelines (
pipeline_id, pipeline_description, pipeline_uid, pipeline_seq,
pipeline_repo_id, pipeline_default_branch,
pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version
) VALUES (
3, 'Sample Pipeline 3', 'pipeline_uid_3', 0, 1,
'develop', 'config_path_2', 1678932200000, 1678932300000, 1
);
-- Insert some executions -- Insert some executions
INSERT INTO executions ( INSERT INTO executions (
execution_id, execution_pipeline_id, execution_repo_id, execution_trigger, execution_id, execution_pipeline_id, execution_repo_id, execution_trigger,
@ -288,6 +297,26 @@ INSERT INTO executions (
'production', 5, 0, 1678932500, 1678932600, 1678932700, 1678932800, 1 'production', 5, 0, 1678932500, 1678932600, 1678932700, 1678932800, 1
); );
-- Execution 5 belongs to pipeline 2 with execution_number = 3 —
-- presumably the highest number for that pipeline (verify against the
-- rows above), so ListLatest should surface it as pipeline 2's latest
-- build.
-- NOTE(review): execution_title reads 'Pipeline Execution 1', which
-- looks copy-pasted from an earlier fixture row; harmless but confusing.
INSERT INTO executions (
execution_id, execution_pipeline_id, execution_repo_id, execution_trigger,
execution_number, execution_parent, execution_status, execution_error,
execution_event, execution_action, execution_link, execution_timestamp,
execution_title, execution_message, execution_before, execution_after,
execution_ref, execution_source_repo, execution_source, execution_target,
execution_author, execution_author_name, execution_author_email,
execution_author_avatar, execution_sender, execution_params, execution_cron,
execution_deploy, execution_deploy_id, execution_debug, execution_started,
execution_finished, execution_created, execution_updated, execution_version
) VALUES (
5, 2, 1, 'manual', 3, 0, 'running', '', 'push', 'created',
'https://example.com/pipelines/1', 1678932400, 'Pipeline Execution 1',
'Pipeline execution message...', 'commit_hash_before', 'commit_hash_after',
'refs/heads/main', 'source_repo_name', 'source_branch', 'target_branch',
'author_login', 'Author Name', 'author@example.com', 'https://example.com/avatar.jpg',
'sender_username', '{"param1": "value1", "param2": "value2"}', '0 0 * * *',
'production', 5, 0, 1678932500, 1678932600, 1678932700, 1678932800, 1
);
-- Insert some stages -- Insert some stages
INSERT INTO stages (stage_id, stage_execution_id, stage_number, stage_parent_group_id, stage_kind, stage_type, stage_name, stage_status, stage_error, stage_errignore, stage_exit_code, stage_limit, stage_os, stage_arch, stage_variant, stage_kernel, stage_machine, stage_started, stage_stopped, stage_created, stage_updated, stage_version, stage_on_success, stage_on_failure, stage_depends_on, stage_labels, stage_limit_repo) INSERT INTO stages (stage_id, stage_execution_id, stage_number, stage_parent_group_id, stage_kind, stage_type, stage_name, stage_status, stage_error, stage_errignore, stage_exit_code, stage_limit, stage_os, stage_arch, stage_variant, stage_kernel, stage_machine, stage_started, stage_stopped, stage_created, stage_updated, stage_version, stage_on_success, stage_on_failure, stage_depends_on, stage_labels, stage_limit_repo)
VALUES ( VALUES (

View File

@ -195,6 +195,78 @@ func (s *pipelineStore) List(
return dst, nil return dst, nil
} }
// ListLatest lists all the pipelines under a repository with information
// about the latest build if available.
func (s *pipelineStore) ListLatest(
	ctx context.Context,
	repoID int64,
	filter types.ListQueryFilter,
) ([]*types.Pipeline, error) {
	// Pipeline columns plus the subset of execution columns needed to
	// populate the pipeline's latest-execution data.
	const pipelineExecutionColumns = pipelineColumns + `
	,executions.execution_id
	,executions.execution_pipeline_id
	,execution_repo_id
	,execution_trigger
	,execution_number
	,execution_status
	,execution_error
	,execution_link
	,execution_timestamp
	,execution_title
	,execution_author
	,execution_author_name
	,execution_author_email
	,execution_author_avatar
	,execution_source
	,execution_target
	,execution_source_repo
	,execution_started
	,execution_finished
	,execution_created
	,execution_updated
	`

	// Subquery selecting, per pipeline, the row of its highest-numbered
	// execution. The repo filter's argument is bound here so it can be
	// carried along when the subquery is embedded below.
	//
	// NOTE(review): selecting execution_id next to MAX(execution_number)
	// under GROUP BY relies on SQLite's bare-column behavior; on other
	// engines the chosen execution_id is unspecified — confirm the target
	// database before reusing this query.
	subquery := database.Builder.
		Select("execution_pipeline_id, execution_id, MAX(execution_number)").
		From("executions").
		Where("execution_repo_id = ?", fmt.Sprint(repoID)).
		GroupBy("execution_pipeline_id")

	// Convert the subquery to SQL, keeping its bound arguments. The
	// previous version discarded the arguments, which left the
	// subquery's '?' placeholder unbound in the final statement
	// (placeholder/argument count mismatch at execution time).
	subquerySQL, subqueryArgs, err := subquery.ToSql()
	if err != nil {
		return nil, err
	}

	// LEFT JOIN so pipelines without any execution are still returned
	// (their execution columns come back NULL). Passing subqueryArgs to
	// LeftJoin preserves correct placeholder ordering in the final SQL.
	stmt := database.Builder.
		Select(pipelineExecutionColumns).
		From("pipelines").
		LeftJoin("("+subquerySQL+") AS max_executions ON pipelines.pipeline_id = max_executions.execution_pipeline_id", subqueryArgs...).
		LeftJoin("executions ON executions.execution_id = max_executions.execution_id").
		Where("pipeline_repo_id = ?", fmt.Sprint(repoID))

	// Optional case-insensitive substring match on the pipeline UID.
	if filter.Query != "" {
		stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query)))
	}
	stmt = stmt.Limit(database.Limit(filter.Size))
	stmt = stmt.Offset(database.Offset(filter.Page, filter.Size))

	sql, args, err := stmt.ToSql()
	if err != nil {
		return nil, errors.Wrap(err, "Failed to convert query to sql")
	}

	db := dbtx.GetAccessor(ctx, s.db)

	dst := []*pipelineExecutionJoin{}
	if err = db.SelectContext(ctx, &dst, sql, args...); err != nil {
		return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query")
	}

	return convert(dst), nil
}
// UpdateOptLock updates the pipeline using the optimistic locking mechanism. // UpdateOptLock updates the pipeline using the optimistic locking mechanism.
func (s *pipelineStore) UpdateOptLock(ctx context.Context, func (s *pipelineStore) UpdateOptLock(ctx context.Context,
pipeline *types.Pipeline, pipeline *types.Pipeline,

View File

@ -0,0 +1,77 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package database
import (
"database/sql"
"github.com/harness/gitness/types"
)
// pipelineExecutionJoin represents a joined row between the pipelines
// and executions tables. The execution columns use sql.Null* types
// because the join is a LEFT JOIN: a pipeline with no executions
// produces NULLs on the execution side.
type pipelineExecutionJoin struct {
	*types.Pipeline
	ID           sql.NullInt64  `db:"execution_id"`
	PipelineID   sql.NullInt64  `db:"execution_pipeline_id"`
	RepoID       sql.NullInt64  `db:"execution_repo_id"`
	Trigger      sql.NullString `db:"execution_trigger"`
	Number       sql.NullInt64  `db:"execution_number"`
	Status       sql.NullString `db:"execution_status"`
	Error        sql.NullString `db:"execution_error"`
	Link         sql.NullString `db:"execution_link"`
	Timestamp    sql.NullInt64  `db:"execution_timestamp"`
	Title        sql.NullString `db:"execution_title"`
	Fork         sql.NullString `db:"execution_source_repo"`
	Source       sql.NullString `db:"execution_source"`
	Target       sql.NullString `db:"execution_target"`
	Author       sql.NullString `db:"execution_author"`
	AuthorName   sql.NullString `db:"execution_author_name"`
	AuthorEmail  sql.NullString `db:"execution_author_email"`
	AuthorAvatar sql.NullString `db:"execution_author_avatar"`
	Started      sql.NullInt64  `db:"execution_started"`
	Finished     sql.NullInt64  `db:"execution_finished"`
	Created      sql.NullInt64  `db:"execution_created"`
	Updated      sql.NullInt64  `db:"execution_updated"`
}
// convert maps joined pipeline/execution rows to pipeline types,
// preserving the input order.
func convert(rows []*pipelineExecutionJoin) []*types.Pipeline {
	result := make([]*types.Pipeline, 0, len(rows))
	for _, row := range rows {
		result = append(result, convertPipelineJoin(row))
	}
	return result
}
// convertPipelineJoin maps a joined row to a pipeline. If the row has
// no execution data (execution_id is NULL because the LEFT JOIN found
// no execution for the pipeline), the embedded pipeline is returned
// unchanged with a nil Execution.
func convertPipelineJoin(join *pipelineExecutionJoin) *types.Pipeline {
	ret := join.Pipeline
	// A NULL execution_id means this pipeline has no latest build.
	if !join.ID.Valid {
		return ret
	}
	ret.Execution = &types.Execution{
		ID:           join.ID.Int64,
		PipelineID:   join.PipelineID.Int64,
		RepoID:       join.RepoID.Int64,
		Trigger:      join.Trigger.String,
		Number:       join.Number.Int64,
		Status:       join.Status.String,
		Error:        join.Error.String,
		Link:         join.Link.String,
		Timestamp:    join.Timestamp.Int64,
		Title:        join.Title.String,
		Fork:         join.Fork.String,
		Source:       join.Source.String,
		Target:       join.Target.String,
		Author:       join.Author.String,
		AuthorName:   join.AuthorName.String,
		AuthorEmail:  join.AuthorEmail.String,
		AuthorAvatar: join.AuthorAvatar.String,
		Started:      join.Started.Int64,
		Finished:     join.Finished.Int64,
		Created:      join.Created.Int64,
		Updated:      join.Updated.Int64,
	}
	return ret
}

View File

@ -5,14 +5,15 @@
package types package types
type Pipeline struct { type Pipeline struct {
ID int64 `db:"pipeline_id" json:"id"` ID int64 `db:"pipeline_id" json:"id"`
Description string `db:"pipeline_description" json:"description"` Description string `db:"pipeline_description" json:"description"`
UID string `db:"pipeline_uid" json:"uid"` UID string `db:"pipeline_uid" json:"uid"`
Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline
RepoID int64 `db:"pipeline_repo_id" json:"repo_id"` RepoID int64 `db:"pipeline_repo_id" json:"repo_id"`
DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"` DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"`
ConfigPath string `db:"pipeline_config_path" json:"config_path"` ConfigPath string `db:"pipeline_config_path" json:"config_path"`
Created int64 `db:"pipeline_created" json:"created"` Created int64 `db:"pipeline_created" json:"created"`
Updated int64 `db:"pipeline_updated" json:"updated"` Execution *Execution `db:"-" json:"execution,omitempty"` // information about the latest execution if available
Version int64 `db:"pipeline_version" json:"version"` Updated int64 `db:"pipeline_updated" json:"updated"`
Version int64 `db:"pipeline_version" json:"-"`
} }