diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 2be6b33f0..000000000 --- a/.npmrc +++ /dev/null @@ -1,3 +0,0 @@ -@harness:registry=https://npm.pkg.github.com -//npm.pkg.github.com/:_authToken=${GITHUB_ACCESS_TOKEN} -always-auth=true \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index ac6220291..bbc673613 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,6 @@ ARG GITHUB_ACCESS_TOKEN # RUN npm ci --omit=dev COPY ./web . -COPY .npmrc /root/.npmrc RUN yarn && yarn build && yarn cache clean @@ -70,18 +69,21 @@ RUN apk --update add ca-certificates # ---------------------------------------------------------# FROM alpine/git:2.36.3 as final -RUN adduser -u 1001 -D -h /app iamuser - # setup app dir and its content WORKDIR /app -RUN chown -R 1001:1001 /app -COPY --from=builder --chown=1001:1001 --chmod=700 /app/gitness /app/gitness +VOLUME /data +ENV XDG_CACHE_HOME /data +ENV GITRPC_SERVER_GIT_ROOT /data +ENV GITNESS_DATABASE_DRIVER sqlite3 +ENV GITNESS_DATABASE_DATASOURCE /data/database.sqlite +ENV GITNESS_METRIC_ENABLED=true +ENV GITNESS_METRIC_ENDPOINT=https://stats.drone.ci/api/v1/gitness + +COPY --from=builder /app/gitness /app/gitness COPY --from=cert-image /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt EXPOSE 3000 EXPOSE 3001 -USER 1001 - ENTRYPOINT [ "/app/gitness", "server" ] \ No newline at end of file diff --git a/cmd/gitness/wire.go b/cmd/gitness/wire.go index 52a9c0639..5d6cb69e9 100644 --- a/cmd/gitness/wire.go +++ b/cmd/gitness/wire.go @@ -41,8 +41,8 @@ import ( "github.com/harness/gitness/internal/bootstrap" gitevents "github.com/harness/gitness/internal/events/git" pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/canceler" "github.com/harness/gitness/internal/pipeline/commit" - eventsstream "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/pipeline/file" 
"github.com/harness/gitness/internal/pipeline/manager" "github.com/harness/gitness/internal/pipeline/runner" @@ -57,6 +57,7 @@ import ( pullreqservice "github.com/harness/gitness/internal/services/pullreq" "github.com/harness/gitness/internal/services/trigger" "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/store/cache" "github.com/harness/gitness/internal/store/database" @@ -130,12 +131,13 @@ func initSystem(ctx context.Context, config *types.Config) (*cliserver.System, e triggerer.WireSet, file.WireSet, runner.WireSet, - eventsstream.WireSet, + sse.WireSet, scheduler.WireSet, commit.WireSet, controllertrigger.WireSet, plugin.WireSet, importer.WireSet, + canceler.WireSet, exporter.WireSet, ) return &cliserver.System{}, nil diff --git a/cmd/gitness/wire_gen.go b/cmd/gitness/wire_gen.go index 9ac60c50a..a021845b1 100644 --- a/cmd/gitness/wire_gen.go +++ b/cmd/gitness/wire_gen.go @@ -10,7 +10,7 @@ import ( "context" "github.com/harness/gitness/cli/server" "github.com/harness/gitness/encrypt" - events2 "github.com/harness/gitness/events" + "github.com/harness/gitness/events" "github.com/harness/gitness/gitrpc" server3 "github.com/harness/gitness/gitrpc/server" "github.com/harness/gitness/gitrpc/server/cron" @@ -36,10 +36,10 @@ import ( "github.com/harness/gitness/internal/auth/authn" "github.com/harness/gitness/internal/auth/authz" "github.com/harness/gitness/internal/bootstrap" - events4 "github.com/harness/gitness/internal/events/git" - events3 "github.com/harness/gitness/internal/events/pullreq" + events3 "github.com/harness/gitness/internal/events/git" + events2 "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/canceler" "github.com/harness/gitness/internal/pipeline/commit" - "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/pipeline/file" 
"github.com/harness/gitness/internal/pipeline/manager" "github.com/harness/gitness/internal/pipeline/runner" @@ -55,6 +55,7 @@ import ( "github.com/harness/gitness/internal/services/pullreq" trigger2 "github.com/harness/gitness/internal/services/trigger" "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/store/cache" "github.com/harness/gitness/internal/store/database" @@ -125,26 +126,28 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro if err != nil { return nil, err } - repository, err := importer.ProvideRepoImporter(config, provider, gitrpcInterface, repoStore, encrypter, jobScheduler, executor) + streamer := sse.ProvideEventsStreaming(pubSub) + repository, err := importer.ProvideRepoImporter(config, provider, gitrpcInterface, repoStore, encrypter, jobScheduler, executor, streamer) if err != nil { return nil, err } repoController := repo.ProvideController(config, db, provider, pathUID, authorizer, pathStore, repoStore, spaceStore, pipelineStore, principalStore, gitrpcInterface, repository) executionStore := database.ProvideExecutionStore(db) - commitService := commit.ProvideCommitService(gitrpcInterface) stageStore := database.ProvideStageStore(db) - fileService := file.ProvideFileService(gitrpcInterface) schedulerScheduler, err := scheduler.ProvideScheduler(stageStore, mutexManager) if err != nil { return nil, err } - triggererTriggerer := triggerer.ProvideTriggerer(executionStore, stageStore, db, pipelineStore, fileService, schedulerScheduler, repoStore) - executionController := execution.ProvideController(db, authorizer, executionStore, commitService, triggererTriggerer, repoStore, stageStore, pipelineStore) stepStore := database.ProvideStepStore(db) + cancelerCanceler := canceler.ProvideCanceler(executionStore, streamer, repoStore, schedulerScheduler, stageStore, stepStore) + commitService := 
commit.ProvideCommitService(gitrpcInterface) + checkStore := database.ProvideCheckStore(db, principalInfoCache) + fileService := file.ProvideFileService(gitrpcInterface) + triggererTriggerer := triggerer.ProvideTriggerer(executionStore, checkStore, stageStore, db, pipelineStore, fileService, schedulerScheduler, repoStore) + executionController := execution.ProvideController(db, authorizer, executionStore, cancelerCanceler, commitService, triggererTriggerer, repoStore, stageStore, pipelineStore) logStore := logs.ProvideLogStore(db, config) logStream := livelog.ProvideLogStream(config) logsController := logs2.ProvideController(db, authorizer, executionStore, repoStore, pipelineStore, stageStore, stepStore, logStore, logStream) - eventsStreamer := events.ProvideEventsStreaming(pubSub) secretStore := database.ProvideSecretStore(db) connectorStore := database.ProvideConnectorStore(db) templateStore := database.ProvideTemplateStore(db) @@ -152,7 +155,7 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro if err != nil { return nil, err } - spaceController := space.ProvideController(db, provider, eventsStreamer, pathUID, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore, repository, exporterRepository) + spaceController := space.ProvideController(db, provider, streamer, pathUID, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore, repository, exporterRepository) triggerStore := database.ProvideTriggerStore(db) pipelineController := pipeline.ProvideController(db, pathUID, pathStore, repoStore, triggerStore, authorizer, pipelineStore) secretController := secret.ProvideController(db, pathUID, pathStore, encrypter, secretStore, authorizer, spaceStore) @@ -170,20 +173,20 @@ func initSystem(ctx context.Context, config *types.Config) 
(*server.System, erro if err != nil { return nil, err } - eventsSystem, err := events2.ProvideSystem(eventsConfig, universalClient) + eventsSystem, err := events.ProvideSystem(eventsConfig, universalClient) if err != nil { return nil, err } - reporter, err := events3.ProvideReporter(eventsSystem) + reporter, err := events2.ProvideReporter(eventsSystem) if err != nil { return nil, err } migrator := codecomments.ProvideMigrator(gitrpcInterface) - readerFactory, err := events4.ProvideReaderFactory(eventsSystem) + readerFactory, err := events3.ProvideReaderFactory(eventsSystem) if err != nil { return nil, err } - eventsReaderFactory, err := events3.ProvideReaderFactory(eventsSystem) + eventsReaderFactory, err := events2.ProvideReaderFactory(eventsSystem) if err != nil { return nil, err } @@ -202,14 +205,13 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro return nil, err } webhookController := webhook2.ProvideController(webhookConfig, db, authorizer, webhookStore, webhookExecutionStore, repoStore, webhookService) - eventsReporter, err := events4.ProvideReporter(eventsSystem) + eventsReporter, err := events3.ProvideReporter(eventsSystem) if err != nil { return nil, err } githookController := githook.ProvideController(db, authorizer, principalStore, repoStore, eventsReporter) serviceaccountController := serviceaccount.NewController(principalUID, authorizer, principalStore, spaceStore, repoStore, tokenStore) principalController := principal.ProvideController(principalStore) - checkStore := database.ProvideCheckStore(db, principalInfoCache) checkController := check2.ProvideController(db, authorizer, repoStore, checkStore, gitrpcInterface) systemController := system.NewController(principalStore, config) apiHandler := router.ProvideAPIHandler(config, authenticator, repoController, executionController, logsController, spaceController, pipelineController, secretController, triggerController, connectorController, templateController, 
pluginController, pullreqController, webhookController, githookController, serviceaccountController, controller, principalController, checkController, systemController) @@ -217,7 +219,7 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro webHandler := router.ProvideWebHandler(config) routerRouter := router.ProvideRouter(config, apiHandler, gitHandler, webHandler) serverServer := server2.ProvideServer(config, routerRouter) - executionManager := manager.ProvideExecutionManager(config, executionStore, pipelineStore, provider, eventsStreamer, fileService, logStore, logStream, repoStore, schedulerScheduler, secretStore, stageStore, stepStore, principalStore) + executionManager := manager.ProvideExecutionManager(config, executionStore, pipelineStore, provider, streamer, fileService, logStore, logStream, checkStore, repoStore, schedulerScheduler, secretStore, stageStore, stepStore, principalStore) client := manager.ProvideExecutionClient(executionManager, config) runtimeRunner, err := runner.ProvideExecutionRunner(config, client, executionManager) if err != nil { diff --git a/internal/api/controller/check/check_report.go b/internal/api/controller/check/check_report.go index ce872e147..ab8388c49 100644 --- a/internal/api/controller/check/check_report.go +++ b/internal/api/controller/check/check_report.go @@ -74,6 +74,10 @@ func (in *ReportInput) Validate() error { } in.Payload.Data = payloadDataJSON + + case enum.CheckPayloadKindPipeline: + return usererror.BadRequest("Kind cannot be pipeline for external checks") + } return nil diff --git a/internal/api/controller/execution/update.go b/internal/api/controller/execution/cancel.go similarity index 69% rename from internal/api/controller/execution/update.go rename to internal/api/controller/execution/cancel.go index ff5ac30fd..ad83f4654 100644 --- a/internal/api/controller/execution/update.go +++ b/internal/api/controller/execution/cancel.go @@ -14,23 +14,18 @@ import ( 
"github.com/harness/gitness/types/enum" ) -type UpdateInput struct { - Status string `json:"status"` -} - -func (c *Controller) Update( +func (c *Controller) Cancel( ctx context.Context, session *auth.Session, repoRef string, pipelineUID string, executionNum int64, - in *UpdateInput) (*types.Execution, error) { +) (*types.Execution, error) { repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - - err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineExecute) if err != nil { return nil, fmt.Errorf("failed to authorize: %w", err) } @@ -42,16 +37,13 @@ func (c *Controller) Update( execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum) if err != nil { - return nil, fmt.Errorf("failed to find execution: %w", err) + return nil, fmt.Errorf("failed to find execution %d: %w", executionNum, err) } - return c.executionStore.UpdateOptLock(ctx, - execution, func(original *types.Execution) error { - // update values only if provided - if in.Status != "" { - original.Status = in.Status - } + err = c.canceler.Cancel(ctx, repo, execution) + if err != nil { + return nil, fmt.Errorf("unable to cancel execution: %w", err) + } - return nil - }) + return execution, nil } diff --git a/internal/api/controller/execution/controller.go b/internal/api/controller/execution/controller.go index dd7983e71..53e96ed57 100644 --- a/internal/api/controller/execution/controller.go +++ b/internal/api/controller/execution/controller.go @@ -6,6 +6,7 @@ package execution import ( "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/pipeline/canceler" "github.com/harness/gitness/internal/pipeline/commit" "github.com/harness/gitness/internal/pipeline/triggerer" "github.com/harness/gitness/internal/store" @@ -17,6 
+18,7 @@ type Controller struct { db *sqlx.DB authorizer authz.Authorizer executionStore store.ExecutionStore + canceler canceler.Canceler commitService commit.CommitService triggerer triggerer.Triggerer repoStore store.RepoStore @@ -28,6 +30,7 @@ func NewController( db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + canceler canceler.Canceler, commitService commit.CommitService, triggerer triggerer.Triggerer, repoStore store.RepoStore, @@ -38,6 +41,7 @@ func NewController( db: db, authorizer: authorizer, executionStore: executionStore, + canceler: canceler, commitService: commitService, triggerer: triggerer, repoStore: repoStore, diff --git a/internal/api/controller/execution/create.go b/internal/api/controller/execution/create.go index d8a1ab197..9647724bd 100644 --- a/internal/api/controller/execution/create.go +++ b/internal/api/controller/execution/create.go @@ -60,6 +60,7 @@ func (c *Controller) Create( hook := &triggerer.Hook{ Trigger: session.Principal.UID, // who/what triggered the build, different from commit author AuthorLogin: commit.Author.Identity.Name, + TriggeredBy: session.Principal.ID, AuthorName: commit.Author.Identity.Name, AuthorEmail: commit.Author.Identity.Email, Ref: ref, diff --git a/internal/api/controller/execution/wire.go b/internal/api/controller/execution/wire.go index 504b3a18a..569b5c8cd 100644 --- a/internal/api/controller/execution/wire.go +++ b/internal/api/controller/execution/wire.go @@ -6,6 +6,7 @@ package execution import ( "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/pipeline/canceler" "github.com/harness/gitness/internal/pipeline/commit" "github.com/harness/gitness/internal/pipeline/triggerer" "github.com/harness/gitness/internal/store" @@ -22,12 +23,13 @@ var WireSet = wire.NewSet( func ProvideController(db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + canceler canceler.Canceler, commitService commit.CommitService, 
triggerer triggerer.Triggerer, repoStore store.RepoStore, stageStore store.StageStore, pipelineStore store.PipelineStore, ) *Controller { - return NewController(db, authorizer, executionStore, commitService, + return NewController(db, authorizer, executionStore, canceler, commitService, triggerer, repoStore, stageStore, pipelineStore) } diff --git a/internal/api/controller/pipeline/create.go b/internal/api/controller/pipeline/create.go index 503e90155..aff6dcdb7 100644 --- a/internal/api/controller/pipeline/create.go +++ b/internal/api/controller/pipeline/create.go @@ -59,6 +59,7 @@ func (c *Controller) Create( Description: in.Description, RepoID: repo.ID, UID: in.UID, + CreatedBy: session.Principal.ID, Seq: 0, DefaultBranch: in.DefaultBranch, ConfigPath: in.ConfigPath, @@ -84,8 +85,8 @@ func (c *Controller) Create( UID: "default", Actions: []enum.TriggerAction{enum.TriggerActionPullReqCreated, enum.TriggerActionPullReqReopened, enum.TriggerActionPullReqBranchUpdated}, - Enabled: true, - Version: 0, + Disabled: false, + Version: 0, } err = c.triggerStore.Create(ctx, trigger) if err != nil { diff --git a/internal/api/controller/repo/content_get.go b/internal/api/controller/repo/content_get.go index 117bff5e5..d806c39d7 100644 --- a/internal/api/controller/repo/content_get.go +++ b/internal/api/controller/repo/content_get.go @@ -108,7 +108,7 @@ func (c *Controller) GetContent(ctx context.Context, IncludeLatestCommit: includeLatestCommit, }) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to read tree node: %w", err) } info, err := mapToContentInfo(treeNodeOutput.Node, treeNodeOutput.Commit, includeLatestCommit) diff --git a/internal/api/controller/repo/delete.go b/internal/api/controller/repo/delete.go index 483655036..609ef7944 100644 --- a/internal/api/controller/repo/delete.go +++ b/internal/api/controller/repo/delete.go @@ -40,10 +40,7 @@ func (c *Controller) Delete(ctx context.Context, session *auth.Session, repoRef 
log.Ctx(ctx).Info().Msgf("Delete request received for repo %s , id: %d", repo.Path, repo.ID) - // TODO: uncomment when soft delete is implemented - // return c.DeleteNoAuth(ctx, session, repo) - - return nil + return c.DeleteNoAuth(ctx, session, repo) } func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, repo *types.Repository) error { diff --git a/internal/api/controller/repo/import.go b/internal/api/controller/repo/import.go index 09dd919dd..76489d68c 100644 --- a/internal/api/controller/repo/import.go +++ b/internal/api/controller/repo/import.go @@ -7,11 +7,11 @@ package repo import ( "context" "fmt" + "github.com/harness/gitness/internal/api/usererror" "github.com/harness/gitness/internal/auth" "github.com/harness/gitness/internal/paths" "github.com/harness/gitness/internal/services/importer" - "github.com/harness/gitness/internal/services/job" "github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/types" "github.com/harness/gitness/types/enum" @@ -43,11 +43,6 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo return nil, err } - jobUID, err := job.UID() - if err != nil { - return nil, fmt.Errorf("error creating job UID: %w", err) - } - var repo *types.Repository err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { // lock parent space path to ensure it doesn't get updated while we setup new repo @@ -57,7 +52,7 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo } pathToRepo := paths.Concatinate(spacePath.Value, in.UID) - repo = remoteRepository.ToRepo(parentSpace.ID, pathToRepo, in.UID, in.Description, jobUID, &session.Principal) + repo = remoteRepository.ToRepo(parentSpace.ID, pathToRepo, in.UID, in.Description, &session.Principal) err = c.repoStore.Create(ctx, repo) if err != nil { diff --git a/internal/api/controller/repo/raw.go b/internal/api/controller/repo/raw.go index 26fa50e9b..161906467 100644 --- 
a/internal/api/controller/repo/raw.go +++ b/internal/api/controller/repo/raw.go @@ -42,7 +42,7 @@ func (c *Controller) Raw(ctx context.Context, IncludeLatestCommit: false, }) if err != nil { - return nil, 0, err + return nil, 0, fmt.Errorf("failed to read tree node: %w", err) } // viewing Raw content is only supported for blob content diff --git a/internal/api/controller/secret/create.go b/internal/api/controller/secret/create.go index eab8c1b16..08dd5dee9 100644 --- a/internal/api/controller/secret/create.go +++ b/internal/api/controller/secret/create.go @@ -51,6 +51,7 @@ func (c *Controller) Create(ctx context.Context, session *auth.Session, in *Crea var secret *types.Secret now := time.Now().UnixMilli() secret = &types.Secret{ + CreatedBy: session.Principal.ID, Description: in.Description, Data: in.Data, SpaceID: parentSpace.ID, diff --git a/internal/api/controller/space/controller.go b/internal/api/controller/space/controller.go index f1da34473..a13c92a93 100644 --- a/internal/api/controller/space/controller.go +++ b/internal/api/controller/space/controller.go @@ -7,9 +7,9 @@ package space import ( "github.com/harness/gitness/internal/api/controller/repo" "github.com/harness/gitness/internal/auth/authz" - "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/services/exporter" "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/url" "github.com/harness/gitness/types/check" @@ -20,7 +20,7 @@ import ( type Controller struct { db *sqlx.DB urlProvider *url.Provider - eventsStream events.EventsStreamer + sseStreamer sse.Streamer uidCheck check.PathUID authorizer authz.Authorizer pathStore store.PathStore @@ -37,7 +37,7 @@ type Controller struct { exporter *exporter.Repository } -func NewController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.EventsStreamer, +func NewController(db 
*sqlx.DB, urlProvider *url.Provider, sseStreamer sse.Streamer, uidCheck check.PathUID, authorizer authz.Authorizer, pathStore store.PathStore, pipelineStore store.PipelineStore, secretStore store.SecretStore, connectorStore store.ConnectorStore, templateStore store.TemplateStore, spaceStore store.SpaceStore, @@ -47,7 +47,7 @@ func NewController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.E return &Controller{ db: db, urlProvider: urlProvider, - eventsStream: eventsStream, + sseStreamer: sseStreamer, uidCheck: uidCheck, authorizer: authorizer, pathStore: pathStore, diff --git a/internal/api/controller/space/delete.go b/internal/api/controller/space/delete.go index 5a4e96dbd..5e7d7d803 100644 --- a/internal/api/controller/space/delete.go +++ b/internal/api/controller/space/delete.go @@ -13,8 +13,6 @@ import ( "github.com/harness/gitness/internal/auth" "github.com/harness/gitness/types" "github.com/harness/gitness/types/enum" - - "github.com/rs/zerolog/log" ) // Delete deletes a space. @@ -26,13 +24,12 @@ func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceDelete, false); err != nil { return err } - // TODO: uncomment when soft delete is implemented - log.Ctx(ctx).Info().Msgf("Delete request received for space %s", space.Path) - // return c.DeleteNoAuth(ctx, session, space.ID) - return nil + + return c.DeleteNoAuth(ctx, session, space.ID) } -// DeleteNoAuth bypasses PermissionSpaceDelete, PermissionSpaceView, PermissionRepoView, and PermissionRepoDelete. +// DeleteNoAuth deletes the space - no authorization is verified. +// WARNING this is meant for internal calls only. 
func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error { filter := &types.SpaceFilter{ Page: 1, @@ -62,10 +59,25 @@ func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, sp return nil } -func (c *Controller) DeleteWithPathNoAuth(ctx context.Context, session *auth.Session, spacePath string) error { - space, err := c.spaceStore.FindByRef(ctx, spacePath) - if err != nil { - return err +// deleteRepositoriesNoAuth deletes all repositories in a space - no authorization is verified. +// WARNING this is meant for internal calls only. +func (c *Controller) deleteRepositoriesNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error { + filter := &types.RepoFilter{ + Page: 1, + Size: int(math.MaxInt), + Query: "", + Order: enum.OrderAsc, + Sort: enum.RepoAttrNone, } - return c.DeleteNoAuth(ctx, session, space.ID) + repos, _, err := c.ListRepositoriesNoAuth(ctx, spaceID, filter) + if err != nil { + return fmt.Errorf("failed to list space repositories: %w", err) + } + for _, repo := range repos { + err = c.repoCtrl.DeleteNoAuth(ctx, session, repo) + if err != nil { + return fmt.Errorf("failed to delete repository %d: %w", repo.ID, err) + } + } + return nil } diff --git a/internal/api/controller/space/delete_repositories.go b/internal/api/controller/space/delete_repositories.go deleted file mode 100644 index cbb2cfec1..000000000 --- a/internal/api/controller/space/delete_repositories.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2022 Harness Inc. All rights reserved. -// Use of this source code is governed by the Polyform Free Trial License -// that can be found in the LICENSE.md file for this repository. - -package space - -import ( - "context" - "fmt" - "math" - - "github.com/harness/gitness/internal/auth" - "github.com/harness/gitness/types" - "github.com/harness/gitness/types/enum" -) - -// deleteRepositoriesNoAuth does not check PermissionRepoView, and PermissionRepoDelete permissions. 
-// Call this through Delete(Space) api to make sure the caller has DeleteSpace permission. -func (c *Controller) deleteRepositoriesNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error { - filter := &types.RepoFilter{ - Page: 1, - Size: int(math.MaxInt), - Query: "", - Order: enum.OrderAsc, - Sort: enum.RepoAttrNone, - } - repos, _, err := c.ListRepositoriesNoAuth(ctx, spaceID, filter) - if err != nil { - return fmt.Errorf("failed to list space repositories: %w", err) - } - for _, repo := range repos { - err = c.repoCtrl.DeleteNoAuth(ctx, session, repo) - if err != nil { - return fmt.Errorf("failed to delete repository %d: %w", repo.ID, err) - } - } - return nil -} diff --git a/internal/api/controller/space/events.go b/internal/api/controller/space/events.go index 5094d82c4..425cf50a4 100644 --- a/internal/api/controller/space/events.go +++ b/internal/api/controller/space/events.go @@ -39,16 +39,21 @@ func (c *Controller) Events( return fmt.Errorf("failed to authorize stream: %w", err) } - ctx, cancel := context.WithTimeout(ctx, tailMaxTime) - defer cancel() + ctx, ctxCancel := context.WithTimeout(ctx, tailMaxTime) + defer ctxCancel() io.WriteString(w, ": ping\n\n") w.Flush() - events, errc, consumer := c.eventsStream.Subscribe(ctx, space.ID) - defer c.eventsStream.Unsubscribe(ctx, consumer) + eventStream, errorStream, sseCancel := c.sseStreamer.Stream(ctx, space.ID) + defer func() { + uerr := sseCancel(ctx) + if uerr != nil { + log.Ctx(ctx).Warn().Err(uerr).Msgf("failed to cancel sse stream for space '%s'", space.Path) + } + }() // could not get error channel - if errc == nil { + if errorStream == nil { io.WriteString(w, "event: error\ndata: eof\n\n") w.Flush() return fmt.Errorf("could not get error channel") @@ -72,17 +77,20 @@ L: case <-ctx.Done(): log.Debug().Msg("events: stream cancelled") break L - case err := <-errc: + case err := <-errorStream: log.Err(err).Msg("events: received error in the tail channel") break L case <-pingTimer.C: // 
if time b/w messages takes longer, send a ping io.WriteString(w, ": ping\n\n") w.Flush() - case event := <-events: + case event := <-eventStream: + io.WriteString(w, fmt.Sprintf("event: %s\n", event.Type)) io.WriteString(w, "data: ") - enc.Encode(event) - io.WriteString(w, "\n\n") + enc.Encode(event.Data) + // NOTE: enc.Encode is ending the data with a new line, only add one more + // Source: https://cs.opensource.google/go/go/+/refs/tags/go1.21.1:src/encoding/json/stream.go;l=220 + io.WriteString(w, "\n") w.Flush() } } diff --git a/internal/api/controller/space/import.go b/internal/api/controller/space/import.go index 0720632fb..4a5b1d76a 100644 --- a/internal/api/controller/space/import.go +++ b/internal/api/controller/space/import.go @@ -16,7 +16,6 @@ import ( "github.com/harness/gitness/internal/bootstrap" "github.com/harness/gitness/internal/paths" "github.com/harness/gitness/internal/services/importer" - "github.com/harness/gitness/internal/services/job" "github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/types" "github.com/harness/gitness/types/check" @@ -131,16 +130,9 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo } for i, remoteRepository := range remoteRepositories { - var jobUID string - - jobUID, err = job.UID() - if err != nil { - return fmt.Errorf("error creating job UID: %w", err) - } - pathToRepo := paths.Concatinate(path.Value, remoteRepository.UID) repo := remoteRepository.ToRepo( - space.ID, pathToRepo, remoteRepository.UID, "", jobUID, &session.Principal) + space.ID, pathToRepo, remoteRepository.UID, "", &session.Principal) err = c.repoStore.Create(ctx, repo) if err != nil { diff --git a/internal/api/controller/space/wire.go b/internal/api/controller/space/wire.go index 8bf6379f4..adeda6171 100644 --- a/internal/api/controller/space/wire.go +++ b/internal/api/controller/space/wire.go @@ -7,9 +7,9 @@ package space import ( "github.com/harness/gitness/internal/api/controller/repo" 
"github.com/harness/gitness/internal/auth/authz" - "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/services/exporter" "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/url" "github.com/harness/gitness/types/check" @@ -23,14 +23,14 @@ var WireSet = wire.NewSet( ProvideController, ) -func ProvideController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.EventsStreamer, +func ProvideController(db *sqlx.DB, urlProvider *url.Provider, sseStreamer sse.Streamer, uidCheck check.PathUID, authorizer authz.Authorizer, pathStore store.PathStore, pipelineStore store.PipelineStore, secretStore store.SecretStore, connectorStore store.ConnectorStore, templateStore store.TemplateStore, spaceStore store.SpaceStore, repoStore store.RepoStore, principalStore store.PrincipalStore, repoCtrl *repo.Controller, membershipStore store.MembershipStore, importer *importer.Repository, exporter *exporter.Repository, ) *Controller { - return NewController(db, urlProvider, eventsStream, uidCheck, authorizer, + return NewController(db, urlProvider, sseStreamer, uidCheck, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, diff --git a/internal/api/controller/trigger/create.go b/internal/api/controller/trigger/create.go index 60a8451d4..74052ee79 100644 --- a/internal/api/controller/trigger/create.go +++ b/internal/api/controller/trigger/create.go @@ -21,7 +21,7 @@ type CreateInput struct { Description string `json:"description"` UID string `json:"uid"` Secret string `json:"secret"` - Enabled bool `json:"enabled"` + Disabled bool `json:"disabled"` Actions []enum.TriggerAction `json:"actions"` } @@ -56,7 +56,7 @@ func (c *Controller) Create( now := time.Now().UnixMilli() trigger := &types.Trigger{ Description: in.Description, - Enabled: in.Enabled, + 
Disabled: in.Disabled, Secret: in.Secret, CreatedBy: session.Principal.ID, RepoID: repo.ID, diff --git a/internal/api/controller/trigger/update.go b/internal/api/controller/trigger/update.go index 282d2978c..2f7e7bb25 100644 --- a/internal/api/controller/trigger/update.go +++ b/internal/api/controller/trigger/update.go @@ -21,7 +21,7 @@ type UpdateInput struct { UID *string `json:"uid"` Actions []enum.TriggerAction `json:"actions"` Secret *string `json:"secret"` - Enabled *bool `json:"enabled"` // can be nil, so keeping it a pointer + Disabled *bool `json:"disabled"` // can be nil, so keeping it a pointer } func (c *Controller) Update( @@ -72,8 +72,8 @@ func (c *Controller) Update( if in.Secret != nil { original.Secret = *in.Secret } - if in.Enabled != nil { - original.Enabled = *in.Enabled + if in.Disabled != nil { + original.Disabled = *in.Disabled } return nil diff --git a/internal/api/handler/execution/update.go b/internal/api/handler/execution/cancel.go similarity index 71% rename from internal/api/handler/execution/update.go rename to internal/api/handler/execution/cancel.go index 5709bf21d..f66d7c72a 100644 --- a/internal/api/handler/execution/update.go +++ b/internal/api/handler/execution/cancel.go @@ -1,11 +1,9 @@ // Copyright 2022 Harness Inc. All rights reserved. // Use of this source code is governed by the Polyform Free Trial License // that can be found in the LICENSE.md file for this repository. 
- package execution import ( - "encoding/json" "net/http" "github.com/harness/gitness/internal/api/controller/execution" @@ -13,40 +11,32 @@ import ( "github.com/harness/gitness/internal/api/request" ) -func HandleUpdate(executionCtrl *execution.Controller) http.HandlerFunc { +func HandleCancel(executionCtrl *execution.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - - in := new(execution.UpdateInput) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequestf(w, "Invalid Request Body: %s.", err) - return - } - pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - repoRef, err := request.GetRepoRefFromPath(r) - if err != nil { - render.TranslatedUserError(w, err) - return - } n, err := request.GetExecutionNumberFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - - pipeline, err := executionCtrl.Update(ctx, session, repoRef, pipelineUID, n, in) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - render.JSON(w, http.StatusOK, pipeline) + execution, err := executionCtrl.Cancel(ctx, session, repoRef, pipelineUID, n) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, execution) } } diff --git a/internal/api/handler/space/events_stream.go b/internal/api/handler/space/events.go similarity index 90% rename from internal/api/handler/space/events_stream.go rename to internal/api/handler/space/events.go index 0abebbd33..d1ba1225f 100644 --- a/internal/api/handler/space/events_stream.go +++ b/internal/api/handler/space/events.go @@ -15,9 +15,9 @@ import ( "github.com/rs/zerolog/log" ) -// HandleEventsStream returns an http.HandlerFunc that watches for +// HandleEvents returns an http.HandlerFunc that watches for // events on a space -func HandleEventsStream(spaceCtrl 
*space.Controller) http.HandlerFunc { +func HandleEvents(spaceCtrl *space.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) diff --git a/internal/api/openapi/pipeline.go b/internal/api/openapi/pipeline.go index d4bd22fb1..c48f809f7 100644 --- a/internal/api/openapi/pipeline.go +++ b/internal/api/openapi/pipeline.go @@ -7,7 +7,6 @@ package openapi import ( "net/http" - "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/controller/pipeline" "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/request" @@ -66,11 +65,6 @@ type getPipelineRequest struct { pipelineRequest } -type updateExecutionRequest struct { - executionRequest - execution.UpdateInput -} - type updateTriggerRequest struct { triggerRequest trigger.UpdateInput @@ -193,6 +187,18 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionFind) + executionCancel := openapi3.Operation{} + executionCancel.WithTags("pipeline") + executionCancel.WithMapOfAnything(map[string]interface{}{"operationId": "cancelExecution"}) + _ = reflector.SetRequest(&executionCancel, new(getExecutionRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&executionCancel, new(types.Execution), http.StatusOK) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, + 
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/cancel", executionCancel) + executionDelete := openapi3.Operation{} executionDelete.WithTags("pipeline") executionDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteExecution"}) @@ -205,19 +211,6 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionDelete) - executionUpdate := openapi3.Operation{} - executionUpdate.WithTags("pipeline") - executionUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateExecution"}) - _ = reflector.SetRequest(&executionUpdate, new(updateExecutionRequest), http.MethodPatch) - _ = reflector.SetJSONResponse(&executionUpdate, new(types.Execution), http.StatusOK) - _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusBadRequest) - _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusInternalServerError) - _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusUnauthorized) - _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusForbidden) - _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusNotFound) - _ = reflector.Spec.AddOperation(http.MethodPatch, - "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionUpdate) - executionList := openapi3.Operation{} executionList.WithTags("pipeline") executionList.WithMapOfAnything(map[string]interface{}{"operationId": "listExecutions"}) diff --git a/internal/pipeline/canceler/canceler.go b/internal/pipeline/canceler/canceler.go new file mode 100644 index 000000000..59e161171 --- /dev/null +++ b/internal/pipeline/canceler/canceler.go @@ -0,0 +1,143 @@ +package canceler + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/pipeline/scheduler" + 
"github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type service struct { + executionStore store.ExecutionStore + sseStreamer sse.Streamer + repoStore store.RepoStore + scheduler scheduler.Scheduler + stageStore store.StageStore + stepStore store.StepStore +} + +// Canceler cancels a build. +type Canceler interface { + // Cancel cancels the provided execution. + Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error +} + +// New returns a cancellation service that encapsulates +// all cancellation operations. +func New( + executionStore store.ExecutionStore, + sseStreamer sse.Streamer, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + stageStore store.StageStore, + stepStore store.StepStore, +) Canceler { + return &service{ + executionStore: executionStore, + sseStreamer: sseStreamer, + repoStore: repoStore, + scheduler: scheduler, + stageStore: stageStore, + stepStore: stepStore, + } +} + +func (s *service) Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error { + log := log.With(). + Int64("execution.id", execution.ID). + Str("execution.status", string(execution.Status)). + Str("execution.Ref", execution.Ref). + Logger() + + // do not cancel the build if the build status is + // complete. only cancel the build if the status is + // running or pending. + switch execution.Status { + case enum.CIStatusPending, enum.CIStatusRunning: + default: + return nil + } + + // update the build status to killed. if the update fails + // due to an optimistic lock error it means the build has + // already started, and should now be ignored. 
+ now := time.Now().UnixMilli() + execution.Status = enum.CIStatusKilled + execution.Finished = now + if execution.Started == 0 { + execution.Started = now + } + + err := s.executionStore.Update(ctx, execution) + if err != nil { + return fmt.Errorf("could not update execution status to canceled: %w", err) + } + + stages, err := s.stageStore.ListWithSteps(ctx, execution.ID) + if err != nil { + return fmt.Errorf("could not list stages with steps: %w", err) + } + + // update the status of all steps to indicate they + // were killed or skipped. + for _, stage := range stages { + if stage.Status.IsDone() { + continue + } + if stage.Started != 0 { + stage.Status = enum.CIStatusKilled + } else { + stage.Status = enum.CIStatusSkipped + stage.Started = now + } + stage.Stopped = now + err := s.stageStore.Update(ctx, stage) + if err != nil { + log.Debug().Err(err). + Int64("stage.number", stage.Number). + Msg("canceler: cannot update stage status") + } + + // update the status of all steps to indicate they + // were killed or skipped. + for _, step := range stage.Steps { + if step.Status.IsDone() { + continue + } + if step.Started != 0 { + step.Status = enum.CIStatusKilled + } else { + step.Status = enum.CIStatusSkipped + step.Started = now + } + step.Stopped = now + step.ExitCode = 130 + err := s.stepStore.Update(ctx, step) + if err != nil { + log.Debug().Err(err). + Int64("stage.number", stage.Number). + Int64("step.number", step.Number). + Msg("canceler: cannot update step status") + } + } + } + + execution.Stages = stages + log.Info().Msg("canceler: successfully cancelled build") + + // trigger a SSE to notify subscribers that + // the execution was cancelled. 
+ err = s.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeExecutionCanceled, execution) + if err != nil { + log.Debug().Err(err).Msg("canceler: failed to publish server-sent event") + } + + return nil +} diff --git a/internal/pipeline/canceler/wire.go b/internal/pipeline/canceler/wire.go new file mode 100644 index 000000000..7af6da18d --- /dev/null +++ b/internal/pipeline/canceler/wire.go @@ -0,0 +1,29 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package canceler + +import ( + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideCanceler, +) + +// ProvideCanceler provides a canceler. +func ProvideCanceler( + executionStore store.ExecutionStore, + sseStreamer sse.Streamer, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + stageStore store.StageStore, + stepStore store.StepStore) Canceler { + return New(executionStore, sseStreamer, repoStore, scheduler, stageStore, stepStore) +} diff --git a/internal/pipeline/checks/write.go b/internal/pipeline/checks/write.go new file mode 100644 index 000000000..bf47b68d5 --- /dev/null +++ b/internal/pipeline/checks/write.go @@ -0,0 +1,57 @@ +package checks + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Write is a util function which writes execution and pipeline state to the +// check store.
+func Write( + ctx context.Context, + checkStore store.CheckStore, + execution *types.Execution, + pipeline *types.Pipeline, +) error { + payload := types.CheckPayloadInternal{ + Number: execution.Number, + RepoID: execution.RepoID, + PipelineID: execution.PipelineID, + } + data, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("could not marshal check payload: %w", err) + } + now := time.Now().UnixMilli() + summary := pipeline.Description + if summary == "" { + summary = pipeline.UID + } + check := &types.Check{ + RepoID: execution.RepoID, + UID: pipeline.UID, + Summary: summary, + Created: now, + Updated: now, + CreatedBy: execution.CreatedBy, + Status: execution.Status.ConvertToCheckStatus(), + CommitSHA: execution.After, + Metadata: []byte("{}"), + Payload: types.CheckPayload{ + Version: "1", + Kind: enum.CheckPayloadKindPipeline, + Data: data, + }, + } + err = checkStore.Upsert(ctx, check) + if err != nil { + return fmt.Errorf("could not upsert to check store: %w", err) + } + return nil +} diff --git a/internal/pipeline/events/events.go b/internal/pipeline/events/events.go deleted file mode 100644 index 632a799ec..000000000 --- a/internal/pipeline/events/events.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Harness Inc. All rights reserved. -// Use of this source code is governed by the Polyform Free Trial License -// that can be found in the LICENSE.md file for this repository. - -package events - -import ( - "context" - "encoding/json" - "strconv" - - "github.com/harness/gitness/pubsub" - "github.com/harness/gitness/types/enum" -) - -// Event is an event which is sent to the UI via server-sent events. -type Event struct { - Type enum.EventType `json:"type"` - Data json.RawMessage `json:"data"` -} - -type EventsStreamer interface { - // Publish publishes an event to a given space ID. - Publish(ctx context.Context, spaceID int64, event *Event) error - - // Subscribe listens to events on a space ID. 
- Subscribe(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, pubsub.Consumer) - - // Unsubscribe unsubscribes the consumer. - Unsubscribe(ctx context.Context, consumer pubsub.Consumer) error -} - -type event struct { - pubsub pubsub.PubSub - topic string -} - -func New(pubsub pubsub.PubSub, topic string) EventsStreamer { - return &event{ - pubsub: pubsub, - topic: topic, - } -} - -func (e *event) Publish(ctx context.Context, spaceID int64, event *Event) error { - bytes, err := json.Marshal(event) - if err != nil { - return err - } - option := pubsub.WithPublishNamespace(format(spaceID)) - return e.pubsub.Publish(ctx, e.topic, bytes, option) -} - -// format creates the namespace name which will be spaces- -func format(id int64) string { - return "spaces-" + strconv.Itoa(int(id)) -} - -func (e *event) Subscribe(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, pubsub.Consumer) { - chEvent := make(chan *Event, 100) // TODO: check best size here - chErr := make(chan error) - g := func(payload []byte) error { - event := &Event{} - err := json.Unmarshal(payload, event) - if err != nil { - // This should never happen - return err - } - select { - case chEvent <- event: - default: - } - - return nil - } - option := pubsub.WithChannelNamespace(format(spaceID)) - consumer := e.pubsub.Subscribe(ctx, e.topic, g, option) - return chEvent, chErr, consumer -} - -func (e *event) Unsubscribe(ctx context.Context, consumer pubsub.Consumer) error { - return consumer.Unsubscribe(ctx, e.topic) -} diff --git a/internal/pipeline/file/gitness.go b/internal/pipeline/file/gitness.go index 13fdecbae..784f23238 100644 --- a/internal/pipeline/file/gitness.go +++ b/internal/pipeline/file/gitness.go @@ -7,7 +7,7 @@ package file import ( "context" "fmt" - "io/ioutil" + "io" "github.com/harness/gitness/gitrpc" "github.com/harness/gitness/types" @@ -37,7 +37,7 @@ func (f *service) Get( IncludeLatestCommit: false, }) if err != nil { - return nil, err + return nil, 
fmt.Errorf("failed to read tree node: %w", err) } // viewing Raw content is only supported for blob content if treeNodeOutput.Node.Type != gitrpc.TreeNodeTypeBlob { @@ -53,7 +53,7 @@ func (f *service) Get( return nil, fmt.Errorf("failed to read blob from gitrpc: %w", err) } - buf, err := ioutil.ReadAll(blobReader.Content) + buf, err := io.ReadAll(blobReader.Content) if err != nil { return nil, fmt.Errorf("could not read blob content from file: %w", err) } diff --git a/internal/pipeline/manager/client.go b/internal/pipeline/manager/client.go index 2f51202c4..86f85b4d6 100644 --- a/internal/pipeline/manager/client.go +++ b/internal/pipeline/manager/client.go @@ -8,8 +8,6 @@ import ( "bytes" "context" "encoding/json" - "errors" - "fmt" "github.com/harness/gitness/livelog" "github.com/harness/gitness/types" @@ -102,7 +100,8 @@ func (e *embedded) Detail(ctx context.Context, stage *drone.Stage) (*client.Cont func (e *embedded) Update(ctx context.Context, stage *drone.Stage) error { var err error convertedStage := convertFromDroneStage(stage) - if stage.Status == enum.CIStatusPending || stage.Status == enum.CIStatusRunning { + status := enum.ParseCIStatus(stage.Status) + if status == enum.CIStatusPending || status == enum.CIStatusRunning { err = e.manager.BeforeStage(ctx, convertedStage) } else { err = e.manager.AfterStage(ctx, convertedStage) @@ -115,7 +114,8 @@ func (e *embedded) Update(ctx context.Context, stage *drone.Stage) error { func (e *embedded) UpdateStep(ctx context.Context, step *drone.Step) error { var err error convertedStep := convertFromDroneStep(step) - if step.Status == enum.CIStatusPending || step.Status == enum.CIStatusRunning { + status := enum.ParseCIStatus(step.Status) + if status == enum.CIStatusPending || status == enum.CIStatusRunning { err = e.manager.BeforeStep(ctx, convertedStep) } else { err = e.manager.AfterStep(ctx, convertedStep) @@ -125,15 +125,13 @@ func (e *embedded) UpdateStep(ctx context.Context, step *drone.Step) error { } // Watch 
watches for build cancellation requests. -func (e *embedded) Watch(ctx context.Context, stage int64) (bool, error) { - // Implement Watch logic here - return false, errors.New("Not implemented") +func (e *embedded) Watch(ctx context.Context, executionID int64) (bool, error) { + return e.manager.Watch(ctx, executionID) } // Batch batch writes logs to the streaming logs. func (e *embedded) Batch(ctx context.Context, step int64, lines []*drone.Line) error { for _, l := range lines { - fmt.Println("line is: ", l) line := convertFromDroneLine(l) err := e.manager.Write(ctx, step, line) if err != nil { diff --git a/internal/pipeline/manager/convert.go b/internal/pipeline/manager/convert.go index d6c37adaf..6929e5aa8 100644 --- a/internal/pipeline/manager/convert.go +++ b/internal/pipeline/manager/convert.go @@ -10,6 +10,7 @@ import ( "github.com/harness/gitness/internal/pipeline/file" "github.com/harness/gitness/livelog" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" "github.com/drone/drone-go/drone" "github.com/drone/runner-go/client" @@ -23,7 +24,7 @@ func convertToDroneStage(stage *types.Stage) *drone.Stage { Name: stage.Name, Kind: stage.Kind, Type: stage.Type, - Status: stage.Status, + Status: string(stage.Status), Error: stage.Error, ErrIgnore: stage.ErrIgnore, ExitCode: stage.ExitCode, @@ -34,10 +35,10 @@ func convertToDroneStage(stage *types.Stage) *drone.Stage { Kernel: stage.Kernel, Limit: stage.Limit, LimitRepo: stage.LimitRepo, - Started: stage.Started, - Stopped: stage.Stopped, - Created: stage.Created, - Updated: stage.Updated, + Started: stage.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: stage.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Created: stage.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Updated: stage.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() Version: stage.Version, OnSuccess: 
stage.OnSuccess, OnFailure: stage.OnFailure, @@ -61,12 +62,12 @@ func convertToDroneStep(step *types.Step) *drone.Step { StageID: step.StageID, Number: int(step.Number), Name: step.Name, - Status: step.Status, + Status: string(step.Status), Error: step.Error, ErrIgnore: step.ErrIgnore, ExitCode: step.ExitCode, - Started: step.Started, - Stopped: step.Stopped, + Started: step.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: step.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() Version: step.Version, DependsOn: step.DependsOn, Image: step.Image, @@ -81,12 +82,12 @@ func convertFromDroneStep(step *drone.Step) *types.Step { StageID: step.StageID, Number: int64(step.Number), Name: step.Name, - Status: step.Status, + Status: enum.ParseCIStatus(step.Status), Error: step.Error, ErrIgnore: step.ErrIgnore, ExitCode: step.ExitCode, - Started: step.Started, - Stopped: step.Stopped, + Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: step.Stopped * 1e3, Version: step.Version, DependsOn: step.DependsOn, Image: step.Image, @@ -103,12 +104,12 @@ func convertFromDroneSteps(steps []*drone.Step) []*types.Step { StageID: step.StageID, Number: int64(step.Number), Name: step.Name, - Status: step.Status, + Status: enum.ParseCIStatus(step.Status), Error: step.Error, ErrIgnore: step.ErrIgnore, ExitCode: step.ExitCode, - Started: step.Started, - Stopped: step.Stopped, + Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: step.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() Version: step.Version, DependsOn: step.DependsOn, Image: step.Image, @@ -127,7 +128,7 @@ func convertFromDroneStage(stage *drone.Stage) *types.Stage { Name: stage.Name, Kind: stage.Kind, Type: stage.Type, - Status: stage.Status, + Status: enum.ParseCIStatus(stage.Status), Error: stage.Error, ErrIgnore: stage.ErrIgnore, ExitCode: 
stage.ExitCode, @@ -138,8 +139,8 @@ func convertFromDroneStage(stage *drone.Stage) *types.Stage { Kernel: stage.Kernel, Limit: stage.Limit, LimitRepo: stage.LimitRepo, - Started: stage.Started, - Stopped: stage.Stopped, + Started: stage.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: stage.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() Version: stage.Version, OnSuccess: stage.OnSuccess, OnFailure: stage.OnFailure, @@ -164,7 +165,7 @@ func convertToDroneBuild(execution *types.Execution) *drone.Build { Trigger: execution.Trigger, Number: execution.Number, Parent: execution.Parent, - Status: execution.Status, + Status: string(execution.Status), Error: execution.Error, Event: execution.Event, Action: execution.Action, @@ -188,10 +189,10 @@ func convertToDroneBuild(execution *types.Execution) *drone.Build { Deploy: execution.Deploy, DeployID: execution.DeployID, Debug: execution.Debug, - Started: execution.Started, - Finished: execution.Finished, - Created: execution.Created, - Updated: execution.Updated, + Started: execution.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Finished: execution.Finished / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Created: execution.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Updated: execution.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() Version: execution.Version, } } diff --git a/internal/pipeline/manager/manager.go b/internal/pipeline/manager/manager.go index bb4919349..0dd0ea873 100644 --- a/internal/pipeline/manager/manager.go +++ b/internal/pipeline/manager/manager.go @@ -10,9 +10,9 @@ import ( "fmt" "io" - "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/pipeline/file" "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" 
urlprovider "github.com/harness/gitness/internal/url" "github.com/harness/gitness/livelog" @@ -63,6 +63,9 @@ type ( // Request requests the next available build stage for execution. Request(ctx context.Context, args *Request) (*types.Stage, error) + // Watch watches for build cancellation requests. + Watch(ctx context.Context, executionID int64) (bool, error) + // Accept accepts the build stage for execution. Accept(ctx context.Context, stage int64, machine string) (*types.Stage, error) @@ -97,8 +100,9 @@ type Manager struct { FileService file.FileService Pipelines store.PipelineStore urlProvider *urlprovider.Provider + Checks store.CheckStore // Converter store.ConvertService - Events events.EventsStreamer + SSEStreamer sse.Streamer // Globals store.GlobalSecretStore Logs store.LogStore Logz livelog.LogStream @@ -119,10 +123,11 @@ func New( executionStore store.ExecutionStore, pipelineStore store.PipelineStore, urlProvider *urlprovider.Provider, - events events.EventsStreamer, + sseStreamer sse.Streamer, fileService file.FileService, logStore store.LogStore, logStream livelog.LogStream, + checkStore store.CheckStore, repoStore store.RepoStore, scheduler scheduler.Scheduler, secretStore store.SecretStore, @@ -135,10 +140,11 @@ func New( Executions: executionStore, Pipelines: pipelineStore, urlProvider: urlProvider, - Events: events, + SSEStreamer: sseStreamer, FileService: fileService, Logs: logStore, Logz: logStream, + Checks: checkStore, Repos: repoStore, Scheduler: scheduler, Secrets: secretStore, @@ -300,7 +306,7 @@ func (m *Manager) Details(ctx context.Context, stageID int64) (*ExecutionContext // Before signals the build step is about to start. func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error { log := log.With(). - Str("step.status", step.Status). + Str("step.status", string(step.Status)). Str("step.name", step.Name). Int64("step.id", step.ID). 
Logger() @@ -313,11 +319,11 @@ func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error { return err } updater := &updater{ - Executions: m.Executions, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, + Executions: m.Executions, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, } return updater.do(noContext, step) } @@ -325,7 +331,7 @@ func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error { // After signals the build step is complete. func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error { log := log.With(). - Str("step.status", step.Status). + Str("step.status", string(step.Status)). Str("step.name", step.Name). Int64("step.id", step.ID). Logger() @@ -333,11 +339,11 @@ func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error { var retErr error updater := &updater{ - Executions: m.Executions, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, + Executions: m.Executions, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, } if err := updater.do(noContext, step); err != nil { @@ -354,12 +360,14 @@ func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error { // BeforeAll signals the build stage is about to start. func (m *Manager) BeforeStage(ctx context.Context, stage *types.Stage) error { s := &setup{ - Executions: m.Executions, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, - Users: m.Users, + Executions: m.Executions, + Checks: m.Checks, + Pipelines: m.Pipelines, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, + Users: m.Users, } return s.do(noContext, stage) @@ -368,13 +376,48 @@ func (m *Manager) BeforeStage(ctx context.Context, stage *types.Stage) error { // AfterAll signals the build stage is complete. 
func (m *Manager) AfterStage(ctx context.Context, stage *types.Stage) error { t := &teardown{ - Executions: m.Executions, - Events: m.Events, - Logs: m.Logz, - Repos: m.Repos, - Scheduler: m.Scheduler, - Steps: m.Steps, - Stages: m.Stages, + Executions: m.Executions, + Pipelines: m.Pipelines, + Checks: m.Checks, + SSEStreamer: m.SSEStreamer, + Logs: m.Logz, + Repos: m.Repos, + Scheduler: m.Scheduler, + Steps: m.Steps, + Stages: m.Stages, } return t.do(noContext, stage) } + +// Watch watches for build cancellation requests. +func (m *Manager) Watch(ctx context.Context, executionID int64) (bool, error) { + ok, err := m.Scheduler.Cancelled(ctx, executionID) + // we expect a context cancel error here which + // indicates a polling timeout. The subscribing + // client should look for the context cancel error + // and resume polling. + if err != nil { + return ok, err + } + + // // TODO: we should be able to return + // // immediately if Cancelled returns true. This requires + // // some more testing but would avoid the extra database + // // call. + // if ok { + // return ok, err + // } + + // if no error is returned we should check + // the database to see if the build is complete. If + // complete, return true. + execution, err := m.Executions.Find(ctx, executionID) + if err != nil { + log := log.With(). + Int64("execution.id", executionID). 
+ Logger() + log.Warn().Msg("manager: cannot find build") + return ok, fmt.Errorf("could not find build for cancellation: %w", err) + } + return execution.Status.IsDone(), nil +} diff --git a/internal/pipeline/manager/setup.go b/internal/pipeline/manager/setup.go index 16126498c..7e7980dc1 100644 --- a/internal/pipeline/manager/setup.go +++ b/internal/pipeline/manager/setup.go @@ -6,11 +6,11 @@ package manager import ( "context" - "encoding/json" "errors" "time" - "github.com/harness/gitness/internal/pipeline/events" + "github.com/harness/gitness/internal/pipeline/checks" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" gitness_store "github.com/harness/gitness/store" "github.com/harness/gitness/types" @@ -20,12 +20,14 @@ import ( ) type setup struct { - Executions store.ExecutionStore - Events events.EventsStreamer - Repos store.RepoStore - Steps store.StepStore - Stages store.StageStore - Users store.PrincipalStore + Executions store.ExecutionStore + Checks store.CheckStore + SSEStreamer sse.Streamer + Pipelines store.PipelineStore + Repos store.RepoStore + Steps store.StepStore + Stages store.StageStore + Users store.PrincipalStore } func (s *setup) do(ctx context.Context, stage *types.Stage) error { @@ -54,7 +56,7 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error { err = s.Stages.Update(noContext, stage) if err != nil { log.Error().Err(err). - Str("stage.status", stage.Status). + Str("stage.status", string(stage.Status)). Msg("manager: cannot update the stage") return err } @@ -67,7 +69,7 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error { err := s.Steps.Create(noContext, step) if err != nil { log.Error().Err(err). - Str("stage.status", stage.Status). + Str("stage.status", string(stage.Status)). Str("step.name", step.Name). Int64("step.id", step.ID). 
Msg("manager: cannot persist the step") @@ -80,13 +82,23 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error { log.Error().Err(err).Msg("manager: cannot update the execution") return err } + pipeline, err := s.Pipelines.Find(ctx, execution.PipelineID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find pipeline") + return err + } + // try to write to the checks store - if not, log an error and continue + err = checks.Write(ctx, s.Checks, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("manager: could not write to checks store") + } stages, err := s.Stages.ListWithSteps(noContext, execution.ID) if err != nil { log.Error().Err(err).Msg("manager: could not list stages with steps") return err } execution.Stages = stages - err = s.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionRunning, execution)) + err = s.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionRunning, execution) if err != nil { log.Warn().Err(err).Msg("manager: could not publish execution event") } @@ -94,18 +106,6 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error { return nil } -func executionEvent( - eventType enum.EventType, - execution *types.Execution, -) *events.Event { - // json.Marshal will not return an error here, it can be absorbed - bytes, _ := json.Marshal(execution) - return &events.Event{ - Type: eventType, - Data: bytes, - } -} - // helper function that updates the execution status from pending to running. // This accounts for the fact that another agent may have already updated // the execution status, which may happen if two stages execute concurrently. 
diff --git a/internal/pipeline/manager/teardown.go b/internal/pipeline/manager/teardown.go index 63d827686..80fc1c70d 100644 --- a/internal/pipeline/manager/teardown.go +++ b/internal/pipeline/manager/teardown.go @@ -8,8 +8,9 @@ import ( "context" "time" - "github.com/harness/gitness/internal/pipeline/events" + "github.com/harness/gitness/internal/pipeline/checks" "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/livelog" gitness_store "github.com/harness/gitness/store" @@ -20,13 +21,15 @@ import ( ) type teardown struct { - Executions store.ExecutionStore - Events events.EventsStreamer - Logs livelog.LogStream - Scheduler scheduler.Scheduler - Repos store.RepoStore - Steps store.StepStore - Stages store.StageStore + Executions store.ExecutionStore + Checks store.CheckStore + Pipelines store.PipelineStore + SSEStreamer sse.Streamer + Logs livelog.LogStream + Scheduler scheduler.Scheduler + Repos store.RepoStore + Steps store.StepStore + Stages store.StageStore } func (t *teardown) do(ctx context.Context, stage *types.Stage) error { @@ -45,7 +48,7 @@ func (t *teardown) do(ctx context.Context, stage *types.Stage) error { Int64("execution.number", execution.Number). Int64("execution.id", execution.ID). Int64("repo.id", execution.RepoID). - Str("stage.status", stage.Status). + Str("stage.status", string(stage.Status)). Logger() repo, err := t.Repos.Find(noContext, execution.RepoID) @@ -134,12 +137,23 @@ func (t *teardown) do(ctx context.Context, stage *types.Stage) error { } execution.Stages = stages - err = t.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionCompleted, execution)) + err = t.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionCompleted, execution) if err != nil { log.Warn().Err(err). 
Msg("manager: could not publish execution completed event") } + pipeline, err := t.Pipelines.Find(ctx, execution.PipelineID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find pipeline") + return err + } + // try to write to the checks store - if not, log an error and continue + err = checks.Write(ctx, t.Checks, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("manager: could not write to checks store") + } + return nil } diff --git a/internal/pipeline/manager/updater.go b/internal/pipeline/manager/updater.go index 0b9f801ba..2ff12c5a9 100644 --- a/internal/pipeline/manager/updater.go +++ b/internal/pipeline/manager/updater.go @@ -7,7 +7,7 @@ package manager import ( "context" - "github.com/harness/gitness/internal/pipeline/events" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/types" "github.com/harness/gitness/types/enum" @@ -16,17 +16,17 @@ import ( ) type updater struct { - Executions store.ExecutionStore - Repos store.RepoStore - Events events.EventsStreamer - Steps store.StepStore - Stages store.StageStore + Executions store.ExecutionStore + Repos store.RepoStore + SSEStreamer sse.Streamer + Steps store.StepStore + Stages store.StageStore } func (u *updater) do(ctx context.Context, step *types.Step) error { log := log.With(). Str("step.name", step.Name). - Str("step.status", step.Status). + Str("step.status", string(step.Status)). Int64("step.id", step.ID). 
Logger() @@ -64,7 +64,7 @@ func (u *updater) do(ctx context.Context, step *types.Step) error { } execution.Stages = stages - err = u.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionUpdated, execution)) + err = u.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionUpdated, execution) if err != nil { log.Warn().Err(err).Msg("manager: cannot publish execution updated event") } diff --git a/internal/pipeline/manager/wire.go b/internal/pipeline/manager/wire.go index f097fd710..26cf77560 100644 --- a/internal/pipeline/manager/wire.go +++ b/internal/pipeline/manager/wire.go @@ -5,9 +5,9 @@ package manager import ( - "github.com/harness/gitness/internal/pipeline/events" "github.com/harness/gitness/internal/pipeline/file" "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/url" "github.com/harness/gitness/livelog" @@ -29,18 +29,19 @@ func ProvideExecutionManager( executionStore store.ExecutionStore, pipelineStore store.PipelineStore, urlProvider *url.Provider, - events events.EventsStreamer, + sseStreamer sse.Streamer, fileService file.FileService, logStore store.LogStore, logStream livelog.LogStream, + checkStore store.CheckStore, repoStore store.RepoStore, scheduler scheduler.Scheduler, secretStore store.SecretStore, stageStore store.StageStore, stepStore store.StepStore, userStore store.PrincipalStore) ExecutionManager { - return New(config, executionStore, pipelineStore, urlProvider, events, fileService, logStore, - logStream, repoStore, scheduler, secretStore, stageStore, stepStore, userStore) + return New(config, executionStore, pipelineStore, urlProvider, sseStreamer, fileService, logStore, + logStream, checkStore, repoStore, scheduler, secretStore, stageStore, stepStore, userStore) } // ProvideExecutionClient provides a client implementation to interact with the execution manager. 
diff --git a/internal/pipeline/scheduler/canceler.go b/internal/pipeline/scheduler/canceler.go new file mode 100644 index 000000000..b161028db --- /dev/null +++ b/internal/pipeline/scheduler/canceler.go @@ -0,0 +1,80 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package scheduler + +import ( + "context" + "sync" + "time" +) + +type canceler struct { + sync.Mutex + + subscribers map[chan struct{}]int64 + cancelled map[int64]time.Time +} + +func newCanceler() *canceler { + return &canceler{ + subscribers: make(map[chan struct{}]int64), + cancelled: make(map[int64]time.Time), + } +} + +func (c *canceler) Cancel(ctx context.Context, id int64) error { + c.Lock() + defer c.Unlock() + c.cancelled[id] = time.Now().Add(time.Minute * 5) + for subscriber, build := range c.subscribers { + if id == build { + close(subscriber) + } + } + c.collect() + return nil +} + +func (c *canceler) Cancelled(ctx context.Context, id int64) (bool, error) { + subscriber := make(chan struct{}) + c.Lock() + c.subscribers[subscriber] = id + c.Unlock() + + defer func() { + c.Lock() + delete(c.subscribers, subscriber) + c.Unlock() + }() + + for { + select { + case <-ctx.Done(): + return false, ctx.Err() + case <-time.After(time.Minute): + c.Lock() + _, ok := c.cancelled[id] + c.Unlock() + if ok { + return true, nil + } + case <-subscriber: + return true, nil + } + } +} + +func (c *canceler) collect() { + // the list of cancelled builds is stored with a ttl, and + // is not removed until the ttl is reached. This provides + // adequate window for clients with connectivity issues to + // reconnect and receive notification of cancel events. 
+ now := time.Now() + for build, timestamp := range c.cancelled { + if now.After(timestamp) { + delete(c.cancelled, build) + } + } +} diff --git a/internal/pipeline/scheduler/scheduler.go b/internal/pipeline/scheduler/scheduler.go index a6a6f9d8d..9588e7062 100644 --- a/internal/pipeline/scheduler/scheduler.go +++ b/internal/pipeline/scheduler/scheduler.go @@ -7,6 +7,8 @@ package scheduler import ( "context" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" "github.com/harness/gitness/types" ) @@ -29,4 +31,29 @@ type Scheduler interface { // Request requests the next stage scheduled for execution. Request(ctx context.Context, filter Filter) (*types.Stage, error) + + // Cancel cancels scheduled or running jobs associated + // with the parent build ID. + Cancel(context.Context, int64) error + + // Cancelled blocks and listens for a cancellation event and + // returns true if the build has been cancelled. + Cancelled(context.Context, int64) (bool, error) +} + +type scheduler struct { + *queue + *canceler +} + +// newScheduler provides an instance of a scheduler with cancel abilities +func newScheduler(stageStore store.StageStore, lock lock.MutexManager) (Scheduler, error) { + q, err := newQueue(stageStore, lock) + if err != nil { + return nil, err + } + return scheduler{ + q, + newCanceler(), + }, nil } diff --git a/internal/pipeline/scheduler/wire.go b/internal/pipeline/scheduler/wire.go index bf4169dac..e69aeaf43 100644 --- a/internal/pipeline/scheduler/wire.go +++ b/internal/pipeline/scheduler/wire.go @@ -21,5 +21,5 @@ func ProvideScheduler( stageStore store.StageStore, lock lock.MutexManager, ) (Scheduler, error) { - return newQueue(stageStore, lock) + return newScheduler(stageStore, lock) } diff --git a/internal/pipeline/triggerer/trigger.go b/internal/pipeline/triggerer/trigger.go index e8c1c70ad..97fe1ec4d 100644 --- a/internal/pipeline/triggerer/trigger.go +++ b/internal/pipeline/triggerer/trigger.go @@ -9,6 +9,7 @@ import ( 
"runtime/debug" "time" + "github.com/harness/gitness/internal/pipeline/checks" "github.com/harness/gitness/internal/pipeline/file" "github.com/harness/gitness/internal/pipeline/scheduler" "github.com/harness/gitness/internal/pipeline/triggerer/dag" @@ -29,6 +30,7 @@ var _ Triggerer = (*triggerer)(nil) type Hook struct { Parent int64 `json:"parent"` Trigger string `json:"trigger"` + TriggeredBy int64 `json:"triggered_by"` Action enum.TriggerAction `json:"action"` Link string `json:"link"` Timestamp int64 `json:"timestamp"` @@ -59,6 +61,7 @@ type Triggerer interface { type triggerer struct { executionStore store.ExecutionStore + checkStore store.CheckStore stageStore store.StageStore db *sqlx.DB pipelineStore store.PipelineStore @@ -69,6 +72,7 @@ type triggerer struct { func New( executionStore store.ExecutionStore, + checkStore store.CheckStore, stageStore store.StageStore, pipelineStore store.PipelineStore, db *sqlx.DB, @@ -78,6 +82,7 @@ func New( ) *triggerer { return &triggerer{ executionStore: executionStore, + checkStore: checkStore, stageStore: stageStore, scheduler: scheduler, db: db, @@ -279,6 +284,7 @@ func (t *triggerer) Trigger( RepoID: repo.ID, PipelineID: pipeline.ID, Trigger: base.Trigger, + CreatedBy: base.TriggeredBy, Number: pipeline.Seq, Parent: base.Parent, Status: enum.CIStatusPending, @@ -308,8 +314,8 @@ func (t *triggerer) Trigger( stages := make([]*types.Stage, len(matched)) for i, match := range matched { - onSuccess := match.Trigger.Status.Match(enum.CIStatusSuccess) - onFailure := match.Trigger.Status.Match(enum.CIStatusFailure) + onSuccess := match.Trigger.Status.Match(string(enum.CIStatusSuccess)) + onFailure := match.Trigger.Status.Match(string(enum.CIStatusFailure)) if len(match.Trigger.Status.Include)+len(match.Trigger.Status.Exclude) == 0 { onFailure = false } @@ -377,6 +383,12 @@ func (t *triggerer) Trigger( return nil, err } + // try to write to check store. 
log on failure but don't error out the execution + err = checks.Write(ctx, t.checkStore, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("trigger: could not write to check store") + } + // err = t.status.Send(ctx, user, &core.StatusInput{ // Repo: repo, // Execution: execution, @@ -453,6 +465,7 @@ func (t *triggerer) createExecutionWithError( execution := &types.Execution{ RepoID: pipeline.RepoID, + PipelineID: pipeline.ID, Number: pipeline.Seq, Parent: base.Parent, Status: enum.CIStatusError, @@ -462,6 +475,7 @@ func (t *triggerer) createExecutionWithError( Link: base.Link, Title: base.Title, Message: base.Message, + CreatedBy: base.TriggeredBy, Before: base.Before, After: base.After, Ref: base.Ref, @@ -486,5 +500,11 @@ func (t *triggerer) createExecutionWithError( return nil, err } + // try to write to check store, log on failure + err = checks.Write(ctx, t.checkStore, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("trigger: failed to update check") + } + return execution, nil } diff --git a/internal/pipeline/triggerer/wire.go b/internal/pipeline/triggerer/wire.go index 23139dcf3..a9f304f1b 100644 --- a/internal/pipeline/triggerer/wire.go +++ b/internal/pipeline/triggerer/wire.go @@ -21,6 +21,7 @@ var WireSet = wire.NewSet( // ProvideTriggerer provides a triggerer which can execute builds. 
func ProvideTriggerer( executionStore store.ExecutionStore, + checkStore store.CheckStore, stageStore store.StageStore, db *sqlx.DB, pipelineStore store.PipelineStore, @@ -28,6 +29,6 @@ func ProvideTriggerer( scheduler scheduler.Scheduler, repoStore store.RepoStore, ) Triggerer { - return New(executionStore, stageStore, pipelineStore, + return New(executionStore, checkStore, stageStore, pipelineStore, db, repoStore, scheduler, fileService) } diff --git a/internal/router/api.go b/internal/router/api.go index b449a651b..47da4fb47 100644 --- a/internal/router/api.go +++ b/internal/router/api.go @@ -189,7 +189,7 @@ func setupSpaces(r chi.Router, spaceCtrl *space.Controller) { r.Patch("/", handlerspace.HandleUpdate(spaceCtrl)) r.Delete("/", handlerspace.HandleDelete(spaceCtrl)) - r.Get("/stream", handlerspace.HandleEventsStream(spaceCtrl)) + r.Get("/events", handlerspace.HandleEvents(spaceCtrl)) r.Post("/move", handlerspace.HandleMove(spaceCtrl)) r.Get("/spaces", handlerspace.HandleListSpaces(spaceCtrl)) @@ -408,7 +408,7 @@ func setupExecutions( r.Post("/", handlerexecution.HandleCreate(executionCtrl)) r.Route(fmt.Sprintf("/{%s}", request.PathParamExecutionNumber), func(r chi.Router) { r.Get("/", handlerexecution.HandleFind(executionCtrl)) - r.Patch("/", handlerexecution.HandleUpdate(executionCtrl)) + r.Post("/cancel", handlerexecution.HandleCancel(executionCtrl)) r.Delete("/", handlerexecution.HandleDelete(executionCtrl)) r.Get( fmt.Sprintf("/logs/{%s}/{%s}", diff --git a/internal/services/importer/id.go b/internal/services/importer/id.go new file mode 100644 index 000000000..d8d28be39 --- /dev/null +++ b/internal/services/importer/id.go @@ -0,0 +1,24 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package importer + +import ( + "strconv" + "strings" +) + +const jobIDPrefix = "import-repo-" + +func JobIDFromRepoID(repoID int64) string { + return jobIDPrefix + strconv.FormatInt(repoID, 10) +} + +func RepoIDFromJobID(jobID string) int64 { + if !strings.HasPrefix(jobID, jobIDPrefix) { + return 0 + } + repoID, _ := strconv.ParseInt(jobID[len(jobIDPrefix):], 10, 64) + return repoID +} diff --git a/internal/services/importer/provider.go b/internal/services/importer/provider.go index 6fcc675c3..bddf057f8 100644 --- a/internal/services/importer/provider.go +++ b/internal/services/importer/provider.go @@ -6,7 +6,8 @@ package importer import ( "context" - "errors" + "crypto/sha512" + "encoding/base32" "fmt" "net/http" "time" @@ -55,31 +56,35 @@ func (r *RepositoryInfo) ToRepo( path string, uid string, description string, - jobUID string, principal *types.Principal, ) *types.Repository { now := time.Now().UnixMilli() - gitTempUID := "importing-" + jobUID + gitTempUID := fmt.Sprintf("importing-%s-%d", hash(path), now) return &types.Repository{ - Version: 0, - ParentID: spaceID, - UID: uid, - GitUID: gitTempUID, // the correct git UID will be set by the job handler - Path: path, - Description: description, - IsPublic: r.IsPublic, - CreatedBy: principal.ID, - Created: now, - Updated: now, - ForkID: 0, - DefaultBranch: r.DefaultBranch, - Importing: true, - ImportingJobUID: &jobUID, + Version: 0, + ParentID: spaceID, + UID: uid, + GitUID: gitTempUID, // the correct git UID will be set by the job handler + Path: path, + Description: description, + IsPublic: r.IsPublic, + CreatedBy: principal.ID, + Created: now, + Updated: now, + ForkID: 0, + DefaultBranch: r.DefaultBranch, + Importing: true, } } -func getClient(provider Provider) (*scm.Client, error) { - if provider.Username == "" || provider.Password == "" { +func hash(s string) string { + h := sha512.New() + _, _ = h.Write([]byte(s)) + return base32.StdEncoding.EncodeToString(h.Sum(nil)[:10]) +} + +func 
getClient(provider Provider, authReq bool) (*scm.Client, error) { + if authReq && (provider.Username == "" || provider.Password == "") { return nil, usererror.BadRequest("scm provider authentication credentials missing") } @@ -114,16 +119,18 @@ func getClient(provider Provider) (*scm.Client, error) { return nil, usererror.BadRequestf("unsupported scm provider: %s", provider) } - c.Client = &http.Client{ - Transport: &oauth2.Transport{ - Source: oauth2.StaticTokenSource(&scm.Token{Token: provider.Password}), - }, + if provider.Password != "" { + c.Client = &http.Client{ + Transport: &oauth2.Transport{ + Source: oauth2.StaticTokenSource(&scm.Token{Token: provider.Password}), + }, + } } return c, nil } func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug string) (RepositoryInfo, error) { - scmClient, err := getClient(provider) + scmClient, err := getClient(provider, false) if err != nil { return RepositoryInfo{}, err } @@ -132,24 +139,9 @@ func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug return RepositoryInfo{}, usererror.BadRequest("provider repository identifier is missing") } - var statusCode int scmRepo, scmResp, err := scmClient.Repositories.Find(ctx, repoSlug) - if scmResp != nil { - statusCode = scmResp.Status - } - - if errors.Is(err, scm.ErrNotFound) || statusCode == http.StatusNotFound { - return RepositoryInfo{}, - usererror.BadRequestf("repository %s not found at %s", repoSlug, provider.Type) - } - if errors.Is(err, scm.ErrNotAuthorized) || statusCode == http.StatusUnauthorized { - return RepositoryInfo{}, - usererror.BadRequestf("bad credentials provided for %s at %s", repoSlug, provider.Type) - } - if err != nil || statusCode > 299 { - return RepositoryInfo{}, - fmt.Errorf("failed to fetch repository %s from %s, status=%d: %w", - repoSlug, provider.Type, statusCode, err) + if err = convertSCMError(provider, repoSlug, scmResp, err); err != nil { + return RepositoryInfo{}, err } return 
RepositoryInfo{ @@ -162,7 +154,7 @@ func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug } func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, spaceSlug string) ([]RepositoryInfo, error) { - scmClient, err := getClient(provider) + scmClient, err := getClient(provider, true) if err != nil { return nil, err } @@ -172,27 +164,23 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s } const pageSize = 100 - opts := scm.ListOptions{Page: 0, Size: pageSize} + opts := scm.RepoListOptions{ + ListOptions: scm.ListOptions{ + Page: 0, + Size: pageSize, + }, + RepoSearchTerm: scm.RepoSearchTerm{ + User: spaceSlug, + }, + } repos := make([]RepositoryInfo, 0) for { opts.Page++ - var statusCode int - scmRepos, scmResp, err := scmClient.Repositories.List(ctx, opts) - if scmResp != nil { - statusCode = scmResp.Status - } - - if errors.Is(err, scm.ErrNotFound) || statusCode == http.StatusNotFound { - return nil, usererror.BadRequestf("space %s not found at %s", spaceSlug, provider.Type) - } - if errors.Is(err, scm.ErrNotAuthorized) || statusCode == http.StatusUnauthorized { - return nil, usererror.BadRequestf("bad credentials provided for %s at %s", spaceSlug, provider.Type) - } - if err != nil || statusCode > 299 { - return nil, fmt.Errorf("failed to fetch space %s from %s, status=%d: %w", - spaceSlug, provider.Type, statusCode, err) + scmRepos, scmResp, err := scmClient.Repositories.ListV2(ctx, opts) + if err = convertSCMError(provider, spaceSlug, scmResp, err); err != nil { + return nil, err } if len(scmRepos) == 0 { @@ -200,9 +188,6 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s } for _, scmRepo := range scmRepos { - if scmRepo.Namespace != spaceSlug { - continue - } repos = append(repos, RepositoryInfo{ Space: scmRepo.Namespace, UID: scmRepo.Name, @@ -215,3 +200,34 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s return 
repos, nil } + +func convertSCMError(provider Provider, slug string, r *scm.Response, err error) error { + if err == nil { + return nil + } + + if r == nil { + if provider.Host != "" { + return usererror.BadRequestf("failed to make HTTP request to %s (host=%s): %s", + provider.Type, provider.Host, err) + } else { + return usererror.BadRequestf("failed to make HTTP request to %s: %s", + provider.Type, err) + } + } + + switch r.Status { + case http.StatusNotFound: + return usererror.BadRequestf("couldn't find %s at %s: %s", + slug, provider.Type, err.Error()) + case http.StatusUnauthorized: + return usererror.BadRequestf("bad credentials provided for %s at %s: %s", + slug, provider.Type, err.Error()) + case http.StatusForbidden: + return usererror.BadRequestf("access denied to %s at %s: %s", + slug, provider.Type, err.Error()) + default: + return usererror.BadRequestf("failed to fetch %s from %s (HTTP status %d): %s", + slug, provider.Type, r.Status, err.Error()) + } +} diff --git a/internal/services/importer/repository.go b/internal/services/importer/repository.go index 028fd175e..6804bcde4 100644 --- a/internal/services/importer/repository.go +++ b/internal/services/importer/repository.go @@ -19,10 +19,12 @@ import ( "github.com/harness/gitness/internal/bootstrap" "github.com/harness/gitness/internal/githook" "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" gitnessurl "github.com/harness/gitness/internal/url" gitness_store "github.com/harness/gitness/store" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" "github.com/rs/zerolog/log" ) @@ -39,6 +41,7 @@ type Repository struct { repoStore store.RepoStore encrypter encrypt.Encrypter scheduler *job.Scheduler + sseStreamer sse.Streamer } var _ job.Handler = (*Repository)(nil) @@ -58,7 +61,7 @@ func (r *Repository) Register(executor *job.Executor) error { // Run starts a background job that imports the 
provided repository from the provided clone URL. func (r *Repository) Run(ctx context.Context, provider Provider, repo *types.Repository, cloneURL string) error { - jobDef, err := r.getJobDef(*repo.ImportingJobUID, Input{ + jobDef, err := r.getJobDef(JobIDFromRepoID(repo.ID), Input{ RepoID: repo.ID, GitUser: provider.Username, GitPass: provider.Password, @@ -90,7 +93,7 @@ func (r *Repository) RunMany(ctx context.Context, repo := repos[k] cloneURL := cloneURLs[k] - jobDef, err := r.getJobDef(*repo.ImportingJobUID, Input{ + jobDef, err := r.getJobDef(JobIDFromRepoID(repo.ID), Input{ RepoID: repo.ID, GitUser: provider.Username, GitPass: provider.Password, @@ -167,16 +170,14 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo return "", errors.New("missing git repository clone URL") } - if input.GitUser != "" || input.GitPass != "" { - repoURL, err := url.Parse(input.CloneURL) - if err != nil { - return "", fmt.Errorf("failed to parse git clone URL: %w", err) - } - - repoURL.User = url.UserPassword(input.GitUser, input.GitPass) - input.CloneURL = repoURL.String() + repoURL, err := url.Parse(input.CloneURL) + if err != nil { + return "", fmt.Errorf("failed to parse git clone URL: %w", err) } + repoURL.User = url.UserPassword(input.GitUser, input.GitPass) + cloneURLWithAuth := repoURL.String() + repo, err := r.repoStore.Find(ctx, input.RepoID) if err != nil { return "", fmt.Errorf("failed to find repo by id: %w", err) @@ -186,23 +187,38 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo return "", fmt.Errorf("repository %s is not being imported", repo.UID) } + log := log.Ctx(ctx).With(). + Int64("repo.id", repo.ID). + Str("repo.path", repo.Path). 
+ Logger() + + log.Info().Msg("create git repository") + gitUID, err := r.createGitRepository(ctx, &systemPrincipal, repo.ID) if err != nil { return "", fmt.Errorf("failed to create empty git repository: %w", err) } + log.Info().Msgf("successfully created git repository with git_uid '%s'", gitUID) + err = func() error { repo.GitUID = gitUID - defaultBranch, err := r.syncGitRepository(ctx, &systemPrincipal, repo, input.CloneURL) + log.Info().Msg("sync repository") + + defaultBranch, err := r.syncGitRepository(ctx, &systemPrincipal, repo, cloneURLWithAuth) if err != nil { return fmt.Errorf("failed to sync git repository from '%s': %w", input.CloneURL, err) } + log.Info().Msgf("successfully synced repository (returned default branch: '%s')", defaultBranch) + if defaultBranch == "" { defaultBranch = r.defaultBranch } + log.Info().Msg("update repo in DB") + repo, err = r.repoStore.UpdateOptLock(ctx, repo, func(repo *types.Repository) error { if !repo.Importing { return errors.New("repository has already finished importing") @@ -221,25 +237,33 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo return nil }() if err != nil { + log.Error().Err(err).Msg("failed repository import - cleanup git repository") + if errDel := r.deleteGitRepository(ctx, &systemPrincipal, repo); errDel != nil { - log.Ctx(ctx).Err(err). - Str("gitUID", gitUID). + log.Warn().Err(errDel). 
Msg("failed to delete git repository after failed import") } return "", fmt.Errorf("failed to import repository: %w", err) } + err = r.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeRepositoryImportCompleted, repo) + if err != nil { + log.Warn().Err(err).Msg("failed to publish import completion SSE") + } + + log.Info().Msg("completed repository import") + return "", nil } func (r *Repository) GetProgress(ctx context.Context, repo *types.Repository) (types.JobProgress, error) { - if !repo.Importing || repo.ImportingJobUID == nil || *repo.ImportingJobUID == "" { + if !repo.Importing { // if the repo is not being imported, or it's job ID has been cleared (or never existed) return state=finished return job.DoneProgress(), nil } - progress, err := r.scheduler.GetJobProgress(ctx, *repo.ImportingJobUID) + progress, err := r.scheduler.GetJobProgress(ctx, JobIDFromRepoID(repo.ID)) if errors.Is(err, gitness_store.ErrResourceNotFound) { // if the job is not found return state=failed return job.FailProgress(), nil @@ -252,11 +276,11 @@ func (r *Repository) GetProgress(ctx context.Context, repo *types.Repository) (t } func (r *Repository) Cancel(ctx context.Context, repo *types.Repository) error { - if repo.ImportingJobUID == nil || *repo.ImportingJobUID == "" { + if !repo.Importing { return nil } - err := r.scheduler.CancelJob(ctx, *repo.ImportingJobUID) + err := r.scheduler.CancelJob(ctx, JobIDFromRepoID(repo.ID)) if err != nil { return fmt.Errorf("failed to cancel job: %w", err) } diff --git a/internal/services/importer/wire.go b/internal/services/importer/wire.go index cdeb81623..14062ab64 100644 --- a/internal/services/importer/wire.go +++ b/internal/services/importer/wire.go @@ -8,6 +8,7 @@ import ( "github.com/harness/gitness/encrypt" "github.com/harness/gitness/gitrpc" "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/sse" "github.com/harness/gitness/internal/store" "github.com/harness/gitness/internal/url" 
"github.com/harness/gitness/types" @@ -27,6 +28,7 @@ func ProvideRepoImporter( encrypter encrypt.Encrypter, scheduler *job.Scheduler, executor *job.Executor, + sseStreamer sse.Streamer, ) (*Repository, error) { importer := &Repository{ defaultBranch: config.Git.DefaultBranch, @@ -35,6 +37,7 @@ func ProvideRepoImporter( repoStore: repoStore, encrypter: encrypter, scheduler: scheduler, + sseStreamer: sseStreamer, } err := executor.Register(jobType, importer) diff --git a/internal/services/trigger/handler_branch.go b/internal/services/trigger/handler_branch.go index 07b54dcd0..609c28c4e 100644 --- a/internal/services/trigger/handler_branch.go +++ b/internal/services/trigger/handler_branch.go @@ -10,6 +10,7 @@ import ( "strings" "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" gitevents "github.com/harness/gitness/internal/events/git" "github.com/harness/gitness/internal/pipeline/triggerer" "github.com/harness/gitness/types/enum" @@ -23,12 +24,13 @@ func ExtractBranch(ref string) string { func (s *Service) handleEventBranchCreated(ctx context.Context, event *events.Event[*gitevents.BranchCreatedPayload]) error { hook := &triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionBranchCreated, - Ref: event.Payload.Ref, - Source: ExtractBranch(event.Payload.Ref), - Target: ExtractBranch(event.Payload.Ref), - After: event.Payload.SHA, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionBranchCreated, + Ref: event.Payload.Ref, + Source: ExtractBranch(event.Payload.Ref), + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Target: ExtractBranch(event.Payload.Ref), + After: event.Payload.SHA, } err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA) if err != nil { @@ -40,13 +42,14 @@ func (s *Service) handleEventBranchCreated(ctx context.Context, func (s *Service) handleEventBranchUpdated(ctx context.Context, event *events.Event[*gitevents.BranchUpdatedPayload]) error { hook := 
&triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionBranchUpdated, - Ref: event.Payload.Ref, - Before: event.Payload.OldSHA, - After: event.Payload.NewSHA, - Source: ExtractBranch(event.Payload.Ref), - Target: ExtractBranch(event.Payload.Ref), + Trigger: enum.TriggerHook, + Action: enum.TriggerActionBranchUpdated, + Ref: event.Payload.Ref, + Before: event.Payload.OldSHA, + After: event.Payload.NewSHA, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Source: ExtractBranch(event.Payload.Ref), + Target: ExtractBranch(event.Payload.Ref), } err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA) if err != nil { diff --git a/internal/services/trigger/handler_pullreq.go b/internal/services/trigger/handler_pullreq.go index fd65444de..5ee872a87 100644 --- a/internal/services/trigger/handler_pullreq.go +++ b/internal/services/trigger/handler_pullreq.go @@ -9,6 +9,7 @@ import ( "fmt" "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" pullreqevents "github.com/harness/gitness/internal/events/pullreq" "github.com/harness/gitness/internal/pipeline/triggerer" "github.com/harness/gitness/types/enum" @@ -19,9 +20,10 @@ import ( func (s *Service) handleEventPullReqCreated(ctx context.Context, event *events.Event[*pullreqevents.CreatedPayload]) error { hook := &triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionPullReqCreated, - After: event.Payload.SourceSHA, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqCreated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.SourceSHA, } err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) if err != nil { @@ -33,9 +35,10 @@ func (s *Service) handleEventPullReqCreated(ctx context.Context, func (s *Service) handleEventPullReqReopened(ctx context.Context, event *events.Event[*pullreqevents.ReopenedPayload]) error { hook := &triggerer.Hook{ - Trigger: 
enum.TriggerHook, - Action: enum.TriggerActionPullReqReopened, - After: event.Payload.SourceSHA, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqReopened, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.SourceSHA, } err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) if err != nil { @@ -47,9 +50,10 @@ func (s *Service) handleEventPullReqReopened(ctx context.Context, func (s *Service) handleEventPullReqBranchUpdated(ctx context.Context, event *events.Event[*pullreqevents.BranchUpdatedPayload]) error { hook := &triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionPullReqBranchUpdated, - After: event.Payload.NewSHA, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqBranchUpdated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.NewSHA, } err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) if err != nil { diff --git a/internal/services/trigger/handler_tag.go b/internal/services/trigger/handler_tag.go index b96c523fb..4d9a30ce7 100644 --- a/internal/services/trigger/handler_tag.go +++ b/internal/services/trigger/handler_tag.go @@ -9,6 +9,7 @@ import ( "fmt" "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" gitevents "github.com/harness/gitness/internal/events/git" "github.com/harness/gitness/internal/pipeline/triggerer" "github.com/harness/gitness/types/enum" @@ -17,13 +18,14 @@ import ( func (s *Service) handleEventTagCreated(ctx context.Context, event *events.Event[*gitevents.TagCreatedPayload]) error { hook := &triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionTagCreated, - Ref: event.Payload.Ref, - Before: event.Payload.SHA, - After: event.Payload.SHA, - Source: event.Payload.Ref, - Target: event.Payload.Ref, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionTagCreated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Ref: 
event.Payload.Ref, + Before: event.Payload.SHA, + After: event.Payload.SHA, + Source: event.Payload.Ref, + Target: event.Payload.Ref, } err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA) if err != nil { @@ -35,13 +37,14 @@ func (s *Service) handleEventTagCreated(ctx context.Context, func (s *Service) handleEventTagUpdated(ctx context.Context, event *events.Event[*gitevents.TagUpdatedPayload]) error { hook := &triggerer.Hook{ - Trigger: enum.TriggerHook, - Action: enum.TriggerActionTagUpdated, - Ref: event.Payload.Ref, - Before: event.Payload.OldSHA, - After: event.Payload.NewSHA, - Source: event.Payload.Ref, - Target: event.Payload.Ref, + Trigger: enum.TriggerHook, + Action: enum.TriggerActionTagUpdated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Ref: event.Payload.Ref, + Before: event.Payload.OldSHA, + After: event.Payload.NewSHA, + Source: event.Payload.Ref, + Target: event.Payload.Ref, } err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA) if err != nil { diff --git a/internal/services/trigger/service.go b/internal/services/trigger/service.go index 4a89155a9..bf66fe0e6 100644 --- a/internal/services/trigger/service.go +++ b/internal/services/trigger/service.go @@ -113,7 +113,8 @@ func New( stream.WithConcurrency(config.Concurrency), stream.WithHandlerOptions( stream.WithIdleTimeout(idleTimeout), - stream.WithMaxRetries(config.MaxRetries), + // retries not needed for builds which failed to trigger, can be adjusted when needed + stream.WithMaxRetries(0), )) _ = r.RegisterCreated(service.handleEventPullReqCreated) @@ -156,6 +157,7 @@ func (s *Service) trigger(ctx context.Context, repoID int64, pipeline, err := s.pipelineStore.Find(ctx, t.PipelineID) if err != nil { errs = multierror.Append(errs, err) + continue } _, err = s.triggerSvc.Trigger(ctx, pipeline, hook) diff --git a/internal/sse/sse.go b/internal/sse/sse.go new file mode 100644 index 000000000..c7750b16f --- /dev/null +++ 
b/internal/sse/sse.go @@ -0,0 +1,96 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package sse + +import ( + "context" + "encoding/json" + "fmt" + "strconv" + + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types/enum" +) + +// Event is a server sent event. +type Event struct { + Type enum.SSEType `json:"type"` + Data json.RawMessage `json:"data"` +} + +type Streamer interface { + // Publish publishes an event to a given space ID. + Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error + + // Streams streams the events on a space ID. + Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error) +} + +type pubsubStreamer struct { + pubsub pubsub.PubSub + namespace string +} + +func NewStreamer(pubsub pubsub.PubSub, namespace string) Streamer { + return &pubsubStreamer{ + pubsub: pubsub, + namespace: namespace, + } +} + +func (e *pubsubStreamer) Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error { + dataSerialized, err := json.Marshal(data) + if err != nil { + return fmt.Errorf("failed to serialize data: %w", err) + } + event := Event{ + Type: eventType, + Data: dataSerialized, + } + serializedEvent, err := json.Marshal(event) + if err != nil { + return fmt.Errorf("failed to serialize event: %w", err) + } + namespaceOption := pubsub.WithPublishNamespace(e.namespace) + topic := getSpaceTopic(spaceID) + err = e.pubsub.Publish(ctx, topic, serializedEvent, namespaceOption) + if err != nil { + return fmt.Errorf("failed to publish event on pubsub: %w", err) + } + + return nil +} + +func (e *pubsubStreamer) Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error) { + chEvent := make(chan *Event, 100) // TODO: check best size here + chErr := make(chan error) 
+ g := func(payload []byte) error { + event := &Event{} + err := json.Unmarshal(payload, event) + if err != nil { + // This should never happen + return err + } + select { + case chEvent <- event: + default: + } + + return nil + } + namespaceOption := pubsub.WithChannelNamespace(e.namespace) + topic := getSpaceTopic(spaceID) + consumer := e.pubsub.Subscribe(ctx, topic, g, namespaceOption) + unsubscribeFN := func(ctx context.Context) error { + return consumer.Unsubscribe(ctx, topic) + } + + return chEvent, chErr, unsubscribeFN +} + +// getSpaceTopic creates the namespace name which will be `spaces:` +func getSpaceTopic(spaceID int64) string { + return "spaces:" + strconv.Itoa(int(spaceID)) +} diff --git a/internal/pipeline/events/wire.go b/internal/sse/wire.go similarity index 72% rename from internal/pipeline/events/wire.go rename to internal/sse/wire.go index a848fc4bb..08c3f4ac3 100644 --- a/internal/pipeline/events/wire.go +++ b/internal/sse/wire.go @@ -2,7 +2,7 @@ // Use of this source code is governed by the Polyform Free Trial License // that can be found in the LICENSE.md file for this repository. -package events +package sse import ( "github.com/harness/gitness/pubsub" @@ -15,9 +15,7 @@ var WireSet = wire.NewSet( ProvideEventsStreaming, ) -func ProvideEventsStreaming(pubsub pubsub.PubSub) EventsStreamer { - return &event{ - pubsub: pubsub, - topic: "events", - } +func ProvideEventsStreaming(pubsub pubsub.PubSub) Streamer { + const namespace = "sse" + return NewStreamer(pubsub, namespace) } diff --git a/internal/store/database.go b/internal/store/database.go index d9058cc0e..a7a325657 100644 --- a/internal/store/database.go +++ b/internal/store/database.go @@ -560,10 +560,6 @@ type ( // Update tries to update an execution. Update(ctx context.Context, execution *types.Execution) error - // UpdateOptLock updates the execution using the optimistic locking mechanism. 
- UpdateOptLock(ctx context.Context, execution *types.Execution, - mutateFn func(execution *types.Execution) error) (*types.Execution, error) - // List lists the executions for a given pipeline ID List(ctx context.Context, pipelineID int64, pagination types.Pagination) ([]*types.Execution, error) diff --git a/internal/store/database/execution.go b/internal/store/database/execution.go index 49533cc18..74e5246d8 100644 --- a/internal/store/database/execution.go +++ b/internal/store/database/execution.go @@ -38,11 +38,12 @@ type executionStore struct { type execution struct { ID int64 `db:"execution_id"` PipelineID int64 `db:"execution_pipeline_id"` + CreatedBy int64 `db:"execution_created_by"` RepoID int64 `db:"execution_repo_id"` Trigger string `db:"execution_trigger"` Number int64 `db:"execution_number"` Parent int64 `db:"execution_parent"` - Status string `db:"execution_status"` + Status enum.CIStatus `db:"execution_status"` Error string `db:"execution_error"` Event string `db:"execution_event"` Action string `db:"execution_action"` @@ -77,6 +78,7 @@ const ( executionColumns = ` execution_id ,execution_pipeline_id + ,execution_created_by ,execution_repo_id ,execution_trigger ,execution_number @@ -149,6 +151,7 @@ func (s *executionStore) Create(ctx context.Context, execution *types.Execution) INSERT INTO executions ( execution_pipeline_id ,execution_repo_id + ,execution_created_by ,execution_trigger ,execution_number ,execution_parent @@ -184,6 +187,7 @@ func (s *executionStore) Create(ctx context.Context, execution *types.Execution) ) VALUES ( :execution_pipeline_id ,:execution_repo_id + ,:execution_created_by ,:execution_trigger ,:execution_number ,:execution_parent @@ -284,33 +288,6 @@ func (s *executionStore) Update(ctx context.Context, e *types.Execution) error { return nil } -// UpdateOptLock updates the pipeline using the optimistic locking mechanism. 
-func (s *executionStore) UpdateOptLock(ctx context.Context, - execution *types.Execution, - mutateFn func(execution *types.Execution) error) (*types.Execution, error) { - for { - dup := *execution - - err := mutateFn(&dup) - if err != nil { - return nil, err - } - - err = s.Update(ctx, &dup) - if err == nil { - return &dup, nil - } - if !errors.Is(err, gitness_store.ErrVersionConflict) { - return nil, err - } - - execution, err = s.FindByNumber(ctx, execution.PipelineID, execution.Number) - if err != nil { - return nil, err - } - } -} - // List lists the executions for a given pipeline ID. // It orders them in descending order of execution number. func (s *executionStore) List( diff --git a/internal/store/database/execution_map.go b/internal/store/database/execution_map.go index 526fbaf54..51104e458 100644 --- a/internal/store/database/execution_map.go +++ b/internal/store/database/execution_map.go @@ -13,6 +13,7 @@ func mapInternalToExecution(in *execution) (*types.Execution, error) { return &types.Execution{ ID: in.ID, PipelineID: in.PipelineID, + CreatedBy: in.CreatedBy, RepoID: in.RepoID, Trigger: in.Trigger, Number: in.Number, @@ -53,6 +54,7 @@ func mapExecutionToInternal(in *types.Execution) *execution { return &execution{ ID: in.ID, PipelineID: in.PipelineID, + CreatedBy: in.CreatedBy, RepoID: in.RepoID, Trigger: in.Trigger, Number: in.Number, diff --git a/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql b/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql new file mode 100644 index 000000000..db6a8e943 --- /dev/null +++ b/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE repositories + DROP CONSTRAINT fk_repo_importing_job_uid, + DROP COLUMN repo_importing_job_uid; diff --git a/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql b/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql new file mode 
100644 index 000000000..4f44adcc9 --- /dev/null +++ b/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql @@ -0,0 +1,7 @@ +ALTER TABLE repositories + ADD COLUMN repo_importing_job_uid TEXT, + ADD CONSTRAINT fk_repo_importing_job_uid + FOREIGN KEY (repo_importing_job_uid) + REFERENCES jobs(job_uid) + ON DELETE SET NULL + ON UPDATE NO ACTION; diff --git a/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql b/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql new file mode 100644 index 000000000..3200f9ccc --- /dev/null +++ b/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql @@ -0,0 +1,10 @@ +DROP TABLE pipelines; +DROP TABLE executions; +DROP TABLE stages; +DROP TABLE secrets; +DROP TABLE steps; +DROP TABLE logs; +DROP TABLE plugins; +DROP TABLE connectors; +DROP TABLE templates; +DROP TABLE triggers; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql b/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql new file mode 100644 index 000000000..8e6e07dd6 --- /dev/null +++ b/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql @@ -0,0 +1,206 @@ +CREATE TABLE pipelines ( + pipeline_id SERIAL PRIMARY KEY, + pipeline_description TEXT NOT NULL, + pipeline_uid TEXT NOT NULL, + pipeline_seq INTEGER NOT NULL DEFAULT 0, + pipeline_disabled BOOLEAN NOT NULL, + pipeline_repo_id INTEGER NOT NULL, + pipeline_default_branch TEXT NOT NULL, + pipeline_created_by INTEGER NOT NULL, + pipeline_config_path TEXT NOT NULL, + pipeline_created BIGINT NOT NULL, + pipeline_updated BIGINT NOT NULL, + pipeline_version INTEGER NOT NULL, + UNIQUE (pipeline_repo_id, pipeline_uid), + CONSTRAINT fk_pipelines_repo_id FOREIGN KEY (pipeline_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE, + CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by) + REFERENCES principals (principal_id) ON DELETE NO 
ACTION +); + +CREATE TABLE executions ( + execution_id SERIAL PRIMARY KEY, + execution_pipeline_id INTEGER NOT NULL, + execution_repo_id INTEGER NOT NULL, + execution_created_by INTEGER NOT NULL, + execution_trigger TEXT NOT NULL, + execution_number INTEGER NOT NULL, + execution_parent INTEGER NOT NULL, + execution_status TEXT NOT NULL, + execution_error TEXT NOT NULL, + execution_event TEXT NOT NULL, + execution_action TEXT NOT NULL, + execution_link TEXT NOT NULL, + execution_timestamp INTEGER NOT NULL, + execution_title TEXT NOT NULL, + execution_message TEXT NOT NULL, + execution_before TEXT NOT NULL, + execution_after TEXT NOT NULL, + execution_ref TEXT NOT NULL, + execution_source_repo TEXT NOT NULL, + execution_source TEXT NOT NULL, + execution_target TEXT NOT NULL, + execution_author TEXT NOT NULL, + execution_author_name TEXT NOT NULL, + execution_author_email TEXT NOT NULL, + execution_author_avatar TEXT NOT NULL, + execution_sender TEXT NOT NULL, + execution_params TEXT NOT NULL, + execution_cron TEXT NOT NULL, + execution_deploy TEXT NOT NULL, + execution_deploy_id INTEGER NOT NULL, + execution_debug BOOLEAN NOT NULL DEFAULT false, + execution_started BIGINT NOT NULL, + execution_finished BIGINT NOT NULL, + execution_created BIGINT NOT NULL, + execution_updated BIGINT NOT NULL, + execution_version INTEGER NOT NULL, + UNIQUE (execution_pipeline_id, execution_number), + CONSTRAINT fk_executions_pipeline_id FOREIGN KEY (execution_pipeline_id) + REFERENCES pipelines (pipeline_id) ON DELETE CASCADE, + CONSTRAINT fk_executions_repo_id FOREIGN KEY (execution_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE, + CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by) + REFERENCES principals (principal_id) ON DELETE NO ACTION +); + +CREATE TABLE secrets ( + secret_id SERIAL PRIMARY KEY, + secret_uid TEXT NOT NULL, + secret_space_id INTEGER NOT NULL, + secret_description TEXT NOT NULL, + secret_data BYTEA NOT NULL, + secret_created_by 
INTEGER NOT NULL, + secret_created BIGINT NOT NULL, + secret_updated BIGINT NOT NULL, + secret_version INTEGER NOT NULL, + UNIQUE (secret_space_id, secret_uid), + CONSTRAINT fk_secrets_space_id FOREIGN KEY (secret_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE, + CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by) + REFERENCES principals (principal_id) ON DELETE NO ACTION +); + +CREATE TABLE stages ( + stage_id SERIAL PRIMARY KEY, + stage_execution_id INTEGER NOT NULL, + stage_repo_id INTEGER NOT NULL, + stage_number INTEGER NOT NULL, + stage_kind TEXT NOT NULL, + stage_type TEXT NOT NULL, + stage_name TEXT NOT NULL, + stage_status TEXT NOT NULL, + stage_error TEXT NOT NULL, + stage_parent_group_id INTEGER NOT NULL, + stage_errignore BOOLEAN NOT NULL, + stage_exit_code INTEGER NOT NULL, + stage_limit INTEGER NOT NULL, + stage_os TEXT NOT NULL, + stage_arch TEXT NOT NULL, + stage_variant TEXT NOT NULL, + stage_kernel TEXT NOT NULL, + stage_machine TEXT NOT NULL, + stage_started BIGINT NOT NULL, + stage_stopped BIGINT NOT NULL, + stage_created BIGINT NOT NULL, + stage_updated BIGINT NOT NULL, + stage_version INTEGER NOT NULL, + stage_on_success BOOLEAN NOT NULL, + stage_on_failure BOOLEAN NOT NULL, + stage_depends_on TEXT NOT NULL, + stage_labels TEXT NOT NULL, + stage_limit_repo INTEGER NOT NULL DEFAULT 0, + UNIQUE (stage_execution_id, stage_number), + CONSTRAINT fk_stages_execution_id FOREIGN KEY (stage_execution_id) + REFERENCES executions (execution_id) ON DELETE CASCADE +); + +CREATE INDEX ix_stage_in_progress ON stages (stage_status) +WHERE stage_status IN ('pending', 'running'); + +CREATE TABLE steps ( + step_id SERIAL PRIMARY KEY, + step_stage_id INTEGER NOT NULL, + step_number INTEGER NOT NULL, + step_name TEXT NOT NULL, + step_status TEXT NOT NULL, + step_error TEXT NOT NULL, + step_parent_group_id INTEGER NOT NULL, + step_errignore BOOLEAN NOT NULL, + step_exit_code INTEGER NOT NULL, + step_started BIGINT NOT NULL, + step_stopped 
BIGINT NOT NULL, + step_version INTEGER NOT NULL, + step_depends_on TEXT NOT NULL, + step_image TEXT NOT NULL, + step_detached BOOLEAN NOT NULL, + step_schema TEXT NOT NULL, + UNIQUE (step_stage_id, step_number), + CONSTRAINT fk_steps_stage_id FOREIGN KEY (step_stage_id) + REFERENCES stages (stage_id) ON DELETE CASCADE +); + +CREATE TABLE logs ( + log_id SERIAL PRIMARY KEY, + log_data BYTEA NOT NULL, + CONSTRAINT fk_logs_id FOREIGN KEY (log_id) + REFERENCES steps (step_id) ON DELETE CASCADE +); + +CREATE TABLE connectors ( + connector_id SERIAL PRIMARY KEY, + connector_uid TEXT NOT NULL, + connector_description TEXT NOT NULL, + connector_type TEXT NOT NULL, + connector_space_id INTEGER NOT NULL, + connector_data TEXT NOT NULL, + connector_created BIGINT NOT NULL, + connector_updated BIGINT NOT NULL, + connector_version INTEGER NOT NULL, + UNIQUE (connector_space_id, connector_uid), + CONSTRAINT fk_connectors_space_id FOREIGN KEY (connector_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE +); + +CREATE TABLE templates ( + template_id SERIAL PRIMARY KEY, + template_uid TEXT NOT NULL, + template_description TEXT NOT NULL, + template_space_id INTEGER NOT NULL, + template_data TEXT NOT NULL, + template_created BIGINT NOT NULL, + template_updated BIGINT NOT NULL, + template_version INTEGER NOT NULL, + UNIQUE (template_space_id, template_uid), + CONSTRAINT fk_templates_space_id FOREIGN KEY (template_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE +); + +CREATE TABLE triggers ( + trigger_id SERIAL PRIMARY KEY, + trigger_uid TEXT NOT NULL, + trigger_pipeline_id INTEGER NOT NULL, + trigger_type TEXT NOT NULL, + trigger_repo_id INTEGER NOT NULL, + trigger_secret TEXT NOT NULL, + trigger_description TEXT NOT NULL, + trigger_disabled BOOLEAN NOT NULL, + trigger_created_by INTEGER NOT NULL, + trigger_actions TEXT NOT NULL, + trigger_created BIGINT NOT NULL, + trigger_updated BIGINT NOT NULL, + trigger_version INTEGER NOT NULL, + UNIQUE (trigger_pipeline_id, 
trigger_uid), + CONSTRAINT fk_triggers_pipeline_id FOREIGN KEY (trigger_pipeline_id) + REFERENCES pipelines (pipeline_id) ON DELETE CASCADE, + CONSTRAINT fk_triggers_repo_id FOREIGN KEY (trigger_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE +); + +CREATE TABLE plugins ( + plugin_uid TEXT NOT NULL, + plugin_description TEXT NOT NULL, + plugin_logo TEXT NOT NULL, + plugin_spec BYTEA NOT NULL, + UNIQUE (plugin_uid) +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql new file mode 100644 index 000000000..799a215c6 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql @@ -0,0 +1 @@ +ALTER TABLE repositories ADD COLUMN repo_importing_job_uid TEXT; diff --git a/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql new file mode 100644 index 000000000..9849283e9 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql @@ -0,0 +1 @@ +ALTER TABLE repositories DROP COLUMN repo_importing_job_uid; diff --git a/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql b/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql new file mode 100644 index 000000000..3200f9ccc --- /dev/null +++ b/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql @@ -0,0 +1,10 @@ +DROP TABLE pipelines; +DROP TABLE executions; +DROP TABLE stages; +DROP TABLE secrets; +DROP TABLE steps; +DROP TABLE logs; +DROP TABLE plugins; +DROP TABLE connectors; +DROP TABLE templates; +DROP TABLE triggers; \ No newline at end of file diff --git a/internal/store/database/migrate/ci/ci_migrations.sql b/internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql similarity index 83% rename from 
internal/store/database/migrate/ci/ci_migrations.sql rename to internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql index 8ccd773d3..8caad8d49 100644 --- a/internal/store/database/migrate/ci/ci_migrations.sql +++ b/internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql @@ -1,20 +1,12 @@ -DROP TABLE IF exists pipelines; -DROP TABLE IF exists executions; -DROP TABLE IF exists stages; -DROP TABLE IF exists secrets; -DROP TABLE IF exists steps; -DROP TABLE IF exists logs; -DROP TABLE IF exists plugins; -DROP TABLE IF exists connectors; -DROP TABLE IF exists templates; -DROP TABLE IF exists triggers; CREATE TABLE pipelines ( pipeline_id INTEGER PRIMARY KEY AUTOINCREMENT ,pipeline_description TEXT NOT NULL ,pipeline_uid TEXT NOT NULL ,pipeline_seq INTEGER NOT NULL DEFAULT 0 + ,pipeline_disabled BOOLEAN NOT NULL ,pipeline_repo_id INTEGER NOT NULL ,pipeline_default_branch TEXT NOT NULL + ,pipeline_created_by INTEGER NOT NULL ,pipeline_config_path TEXT NOT NULL ,pipeline_created INTEGER NOT NULL ,pipeline_updated INTEGER NOT NULL @@ -28,12 +20,19 @@ CREATE TABLE pipelines ( REFERENCES repositories (repo_id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION ); CREATE TABLE executions ( execution_id INTEGER PRIMARY KEY AUTOINCREMENT ,execution_pipeline_id INTEGER NOT NULL ,execution_repo_id INTEGER NOT NULL + ,execution_created_by INTEGER NOT NULL ,execution_trigger TEXT NOT NULL ,execution_number INTEGER NOT NULL ,execution_parent INTEGER NOT NULL @@ -81,6 +80,12 @@ CREATE TABLE executions ( REFERENCES repositories (repo_id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON 
UPDATE NO ACTION + ON DELETE NO ACTION ); CREATE TABLE secrets ( @@ -92,6 +97,7 @@ CREATE TABLE secrets ( ,secret_created INTEGER NOT NULL ,secret_updated INTEGER NOT NULL ,secret_version INTEGER NOT NULL + ,secret_created_by INTEGER NOT NULL -- Ensure unique combination of space ID and UID ,UNIQUE (secret_space_id, secret_uid) @@ -101,6 +107,12 @@ CREATE TABLE secrets ( REFERENCES spaces (space_id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION ); CREATE TABLE stages ( @@ -189,34 +201,6 @@ CREATE TABLE logs ( ON DELETE CASCADE ); --- Insert some pipelines -INSERT INTO pipelines ( - pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, - pipeline_repo_id, pipeline_default_branch, - pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version -) VALUES ( - 1, 'Sample Pipeline 1', 'pipeline_uid_1', 2, 1, - 'main', 'config_path_1', 1678932000, 1678932100, 1 -); - -INSERT INTO pipelines ( - pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, - pipeline_repo_id, pipeline_default_branch, - pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version -) VALUES ( - 2, 'Sample Pipeline 2', 'pipeline_uid_2', 0, 1, - 'develop', 'config_path_2', 1678932200, 1678932300, 1 -); - -INSERT INTO pipelines ( - pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, - pipeline_repo_id, pipeline_default_branch, - pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version -) VALUES ( - 3, 'Sample Pipeline 3', 'pipeline_uid_3', 0, 1, - 'develop', 'config_path_2', 1678932200000, 1678932300000, 1 -); - CREATE TABLE connectors ( connector_id INTEGER PRIMARY KEY AUTOINCREMENT ,connector_uid TEXT NOT NULL @@ -262,10 +246,11 @@ CREATE TABLE triggers ( trigger_id INTEGER PRIMARY KEY AUTOINCREMENT ,trigger_uid TEXT NOT 
NULL ,trigger_pipeline_id INTEGER NOT NULL + ,trigger_type TEXT NOT NULL ,trigger_repo_id INTEGER NOT NULL ,trigger_secret TEXT NOT NULL ,trigger_description TEXT NOT NULL - ,trigger_enabled BOOLEAN NOT NULL + ,trigger_disabled BOOLEAN NOT NULL ,trigger_created_by INTEGER NOT NULL ,trigger_actions TEXT NOT NULL ,trigger_created INTEGER NOT NULL @@ -296,20 +281,4 @@ CREATE TABLE plugins ( -- Ensure unique plugin names ,UNIQUE(plugin_uid) -); - -INSERT INTO plugins (plugin_uid, plugin_description, plugin_logo, plugin_spec) -VALUES - ('plugins/slack', 'A sample slack plugin', 'slack.png', - 'inputs: - channel: - type: string - token: - type: string - - steps: - - type: script - spec: - image: plugins/slack - envs: - PLUGIN_CHANNEL: <+ inputs.channel >'); \ No newline at end of file +); \ No newline at end of file diff --git a/internal/store/database/pipeline.go b/internal/store/database/pipeline.go index 0213ab1f5..ff956503d 100644 --- a/internal/store/database/pipeline.go +++ b/internal/store/database/pipeline.go @@ -31,6 +31,7 @@ const ( pipelineColumns = ` pipeline_id ,pipeline_description + ,pipeline_created_by ,pipeline_uid ,pipeline_seq ,pipeline_repo_id @@ -87,6 +88,8 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er ,pipeline_uid ,pipeline_seq ,pipeline_repo_id + ,pipeline_disabled + ,pipeline_created_by ,pipeline_default_branch ,pipeline_config_path ,pipeline_created @@ -97,6 +100,8 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er :pipeline_uid, :pipeline_seq, :pipeline_repo_id, + :pipeline_disabled, + :pipeline_created_by, :pipeline_default_branch, :pipeline_config_path, :pipeline_created, @@ -229,7 +234,7 @@ func (s *pipelineStore) ListLatest( ` // Create a subquery to get max execution IDs for each unique execution pipeline ID. subquery := database.Builder. - Select("execution_pipeline_id, execution_id, MAX(execution_number)"). 
+ Select("execution_pipeline_id, MAX(execution_id) AS execution_id"). From("executions"). Where("execution_repo_id = ?"). GroupBy("execution_pipeline_id") diff --git a/internal/store/database/pipeline_join.go b/internal/store/database/pipeline_join.go index b179cf7d2..02577956b 100644 --- a/internal/store/database/pipeline_join.go +++ b/internal/store/database/pipeline_join.go @@ -8,6 +8,7 @@ import ( "database/sql" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" ) // pipelineExecutionjoin struct represents a joined row between pipelines and executions @@ -15,6 +16,7 @@ type pipelineExecutionJoin struct { *types.Pipeline ID sql.NullInt64 `db:"execution_id"` PipelineID sql.NullInt64 `db:"execution_pipeline_id"` + Action sql.NullString `db:"execution_action"` Message sql.NullString `db:"execution_message"` After sql.NullString `db:"execution_after"` RepoID sql.NullInt64 `db:"execution_repo_id"` @@ -56,11 +58,12 @@ func convertPipelineJoin(join *pipelineExecutionJoin) *types.Pipeline { ID: join.ID.Int64, PipelineID: join.PipelineID.Int64, RepoID: join.RepoID.Int64, + Action: join.Action.String, Trigger: join.Trigger.String, Number: join.Number.Int64, After: join.After.String, Message: join.Message.String, - Status: join.Status.String, + Status: enum.ParseCIStatus(join.Status.String), Error: join.Error.String, Link: join.Link.String, Timestamp: join.Timestamp.Int64, diff --git a/internal/store/database/repo.go b/internal/store/database/repo.go index 6770ce57d..fb7f8084c 100644 --- a/internal/store/database/repo.go +++ b/internal/store/database/repo.go @@ -59,8 +59,7 @@ const ( ,repo_num_closed_pulls ,repo_num_open_pulls ,repo_num_merged_pulls - ,repo_importing - ,repo_importing_job_uid` + ,repo_importing` repoSelectBaseWithJoin = ` SELECT` + repoColumnsForJoin + ` @@ -127,7 +126,6 @@ func (s *RepoStore) Create(ctx context.Context, repo *types.Repository) error { ,repo_num_open_pulls ,repo_num_merged_pulls ,repo_importing - 
,repo_importing_job_uid ) values ( :repo_version ,:repo_parent_id @@ -147,7 +145,6 @@ func (s *RepoStore) Create(ctx context.Context, repo *types.Repository) error { ,:repo_num_open_pulls ,:repo_num_merged_pulls ,:repo_importing - ,:repo_importing_job_uid ) RETURNING repo_id` db := dbtx.GetAccessor(ctx, s.db) @@ -185,7 +182,6 @@ func (s *RepoStore) Update(ctx context.Context, repo *types.Repository) error { ,repo_num_open_pulls = :repo_num_open_pulls ,repo_num_merged_pulls = :repo_num_merged_pulls ,repo_importing = :repo_importing - ,repo_importing_job_uid = :repo_importing_job_uid WHERE repo_id = :repo_id AND repo_version = :repo_version - 1` updatedAt := time.Now() diff --git a/internal/store/database/secret.go b/internal/store/database/secret.go index f49d085b4..40f7a89b8 100644 --- a/internal/store/database/secret.go +++ b/internal/store/database/secret.go @@ -31,6 +31,7 @@ const ( secret_id, secret_description, secret_space_id, + secret_created_by, secret_uid, secret_data, secret_created, @@ -82,6 +83,7 @@ func (s *secretStore) Create(ctx context.Context, secret *types.Secret) error { INSERT INTO secrets ( secret_description, secret_space_id, + secret_created_by, secret_uid, secret_data, secret_created, @@ -90,6 +92,7 @@ func (s *secretStore) Create(ctx context.Context, secret *types.Secret) error { ) VALUES ( :secret_description, :secret_space_id, + :secret_created_by, :secret_uid, :secret_data, :secret_created, diff --git a/internal/store/database/stage.go b/internal/store/database/stage.go index 8e9ba540f..38764d410 100644 --- a/internal/store/database/stage.go +++ b/internal/store/database/stage.go @@ -14,6 +14,7 @@ import ( "github.com/harness/gitness/store/database" "github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" "github.com/jmoiron/sqlx" sqlxtypes "github.com/jmoiron/sqlx/types" @@ -61,7 +62,7 @@ type stage struct { Name string `db:"stage_name"` Kind string `db:"stage_kind"` 
Type string `db:"stage_type"` - Status string `db:"stage_status"` + Status enum.CIStatus `db:"stage_status"` Error string `db:"stage_error"` ParentGroupID int64 `db:"stage_parent_group_id"` ErrIgnore bool `db:"stage_errignore"` @@ -265,9 +266,17 @@ func (s *stageStore) Update(ctx context.Context, st *types.Stage) error { SET stage_status = :stage_status ,stage_machine = :stage_machine + ,stage_started = :stage_started + ,stage_stopped = :stage_stopped + ,stage_exit_code = :stage_exit_code ,stage_updated = :stage_updated ,stage_version = :stage_version ,stage_error = :stage_error + ,stage_on_success = :stage_on_success + ,stage_on_failure = :stage_on_failure + ,stage_errignore = :stage_errignore + ,stage_depends_on = :stage_depends_on + ,stage_labels = :stage_labels WHERE stage_id = :stage_id AND stage_version = :stage_version - 1` updatedAt := time.Now() steps := st.Steps diff --git a/internal/store/database/stage_map.go b/internal/store/database/stage_map.go index 003f1228b..a32f6c10b 100644 --- a/internal/store/database/stage_map.go +++ b/internal/store/database/stage_map.go @@ -10,6 +10,7 @@ import ( "fmt" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" sqlxtypes "github.com/jmoiron/sqlx/types" "github.com/pkg/errors" @@ -46,7 +47,7 @@ func convertFromNullStep(nullstep *nullstep) (*types.Step, error) { StageID: nullstep.StageID.Int64, Number: nullstep.Number.Int64, Name: nullstep.Name.String, - Status: nullstep.Status.String, + Status: enum.ParseCIStatus(nullstep.Status.String), Error: nullstep.Error.String, ErrIgnore: nullstep.ErrIgnore.Bool, ExitCode: int(nullstep.ExitCode.Int64), diff --git a/internal/store/database/step.go b/internal/store/database/step.go index 2e04ed750..c05a039ae 100644 --- a/internal/store/database/step.go +++ b/internal/store/database/step.go @@ -13,6 +13,7 @@ import ( "github.com/harness/gitness/store/database" "github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/types" + 
"github.com/harness/gitness/types/enum" "github.com/jmoiron/sqlx" sqlxtypes "github.com/jmoiron/sqlx/types" @@ -46,7 +47,7 @@ type step struct { Number int64 `db:"step_number"` ParentGroupID int64 `db:"step_parent_group_id"` Name string `db:"step_name"` - Status string `db:"step_status"` + Status enum.CIStatus `db:"step_status"` Error string `db:"step_error"` ErrIgnore bool `db:"step_errignore"` ExitCode int `db:"step_exit_code"` diff --git a/internal/store/database/trigger.go b/internal/store/database/trigger.go index b0c61c39a..2e199b8c3 100644 --- a/internal/store/database/trigger.go +++ b/internal/store/database/trigger.go @@ -29,11 +29,12 @@ type trigger struct { ID int64 `db:"trigger_id"` UID string `db:"trigger_uid"` Description string `db:"trigger_description"` + Type string `db:"trigger_type"` Secret string `db:"trigger_secret"` PipelineID int64 `db:"trigger_pipeline_id"` RepoID int64 `db:"trigger_repo_id"` CreatedBy int64 `db:"trigger_created_by"` - Enabled bool `db:"trigger_enabled"` + Disabled bool `db:"trigger_disabled"` Actions sqlxtypes.JSONText `db:"trigger_actions"` Created int64 `db:"trigger_created"` Updated int64 `db:"trigger_updated"` @@ -50,11 +51,12 @@ func mapInternalToTrigger(trigger *trigger) (*types.Trigger, error) { return &types.Trigger{ ID: trigger.ID, Description: trigger.Description, + Type: trigger.Type, Secret: trigger.Secret, PipelineID: trigger.PipelineID, RepoID: trigger.RepoID, CreatedBy: trigger.CreatedBy, - Enabled: trigger.Enabled, + Disabled: trigger.Disabled, Actions: actions, UID: trigger.UID, Created: trigger.Created, @@ -80,11 +82,12 @@ func mapTriggerToInternal(t *types.Trigger) *trigger { ID: t.ID, UID: t.UID, Description: t.Description, + Type: t.Type, PipelineID: t.PipelineID, Secret: t.Secret, RepoID: t.RepoID, CreatedBy: t.CreatedBy, - Enabled: t.Enabled, + Disabled: t.Disabled, Actions: EncodeToSQLXJSON(t.Actions), Created: t.Created, Updated: t.Updated, @@ -107,7 +110,7 @@ const ( triggerColumns = ` trigger_id 
,trigger_uid - ,trigger_enabled + ,trigger_disabled ,trigger_actions ,trigger_description ,trigger_pipeline_id @@ -139,7 +142,8 @@ func (s *triggerStore) Create(ctx context.Context, t *types.Trigger) error { trigger_uid ,trigger_description ,trigger_actions - ,trigger_enabled + ,trigger_disabled + ,trigger_type ,trigger_secret ,trigger_created_by ,trigger_pipeline_id @@ -151,7 +155,8 @@ func (s *triggerStore) Create(ctx context.Context, t *types.Trigger) error { :trigger_uid ,:trigger_description ,:trigger_actions - ,:trigger_enabled + ,:trigger_disabled + ,:trigger_type ,:trigger_secret ,:trigger_created_by ,:trigger_pipeline_id @@ -286,7 +291,7 @@ func (s *triggerStore) ListAllEnabled( stmt := database.Builder. Select(triggerColumns). From("triggers"). - Where("trigger_repo_id = ? AND trigger_enabled = true", fmt.Sprint(repoID)) + Where("trigger_repo_id = ? AND trigger_disabled = false", fmt.Sprint(repoID)) sql, args, err := stmt.ToSql() if err != nil { diff --git a/pubsub/inmem.go b/pubsub/inmem.go index 6770b10cb..9b6497e3f 100644 --- a/pubsub/inmem.go +++ b/pubsub/inmem.go @@ -98,9 +98,12 @@ func (r *InMemory) Publish(ctx context.Context, topic string, payload []byte, op } topic = formatTopic(pubConfig.app, pubConfig.namespace, topic) + wg := sync.WaitGroup{} for _, sub := range r.registry { if slices.Contains(sub.topics, topic) && !sub.isClosed() { + wg.Add(1) go func(subscriber *inMemorySubscriber) { + defer wg.Done() // timer is based on subscriber data t := time.NewTimer(subscriber.config.sendTimeout) defer t.Stop() @@ -118,6 +121,10 @@ func (r *InMemory) Publish(ctx context.Context, topic string, payload []byte, op } } + // Wait for all subscribers to complete + // Otherwise, we might fail notifying some subscribers due to context completion. 
+ wg.Wait() + return nil } diff --git a/types/check.go b/types/check.go index 825e080d1..314a6a89a 100644 --- a/types/check.go +++ b/types/check.go @@ -53,3 +53,11 @@ type ReqCheck struct { type CheckPayloadText struct { Details string `json:"details"` } + +// CheckPayloadInternal is for internal use for more seamless integration for +// gitness CI status checks. +type CheckPayloadInternal struct { + Number int64 `json:"execution_number"` + RepoID int64 `json:"repo_id"` + PipelineID int64 `json:"pipeline_id"` +} diff --git a/types/enum/check.go b/types/enum/check.go index e348d6e9f..b6ec86cb0 100644 --- a/types/enum/check.go +++ b/types/enum/check.go @@ -44,10 +44,12 @@ const ( CheckPayloadKindEmpty CheckPayloadKind = "" CheckPayloadKindRaw CheckPayloadKind = "raw" CheckPayloadKindMarkdown CheckPayloadKind = "markdown" + CheckPayloadKindPipeline CheckPayloadKind = "pipeline" ) var checkPayloadTypes = sortEnum([]CheckPayloadKind{ CheckPayloadKindEmpty, CheckPayloadKindRaw, CheckPayloadKindMarkdown, + CheckPayloadKindPipeline, }) diff --git a/types/enum/ci_status.go b/types/enum/ci_status.go index 2a60342d6..e0e042555 100644 --- a/types/enum/ci_status.go +++ b/types/enum/ci_status.go @@ -5,15 +5,66 @@ // Status types for CI. package enum -const ( - CIStatusSkipped = "skipped" - CIStatusBlocked = "blocked" - CIStatusDeclined = "declined" - CIStatusWaitingOnDeps = "waiting_on_dependencies" - CIStatusPending = "pending" - CIStatusRunning = "running" - CIStatusSuccess = "success" - CIStatusFailure = "failure" - CIStatusKilled = "killed" - CIStatusError = "error" +import ( + "strings" ) + +// CIStatus defines the different kinds of CI statuses for +// stages, steps and executions. 
+type CIStatus string + +const ( + CIStatusSkipped CIStatus = "skipped" + CIStatusBlocked CIStatus = "blocked" + CIStatusDeclined CIStatus = "declined" + CIStatusWaitingOnDeps CIStatus = "waiting_on_dependencies" + CIStatusPending CIStatus = "pending" + CIStatusRunning CIStatus = "running" + CIStatusSuccess CIStatus = "success" + CIStatusFailure CIStatus = "failure" + CIStatusKilled CIStatus = "killed" + CIStatusError CIStatus = "error" +) + +func (status CIStatus) ConvertToCheckStatus() CheckStatus { + if status == CIStatusPending || status == CIStatusWaitingOnDeps { + return CheckStatusPending + } + if status == CIStatusSuccess || status == CIStatusSkipped { + return CheckStatusSuccess + } + if status == CIStatusFailure { + return CheckStatusFailure + } + if status == CIStatusRunning { + return CheckStatusRunning + } + return CheckStatusError +} + +// ParseCIStatus converts the status from a string to typed enum. +// If the match is not exact, will just return default error status +// instead of explicitly returning not found error. +func ParseCIStatus(status string) CIStatus { + switch strings.ToLower(status) { + case "skipped", "blocked", "declined", "waiting_on_dependencies", "pending", "running", "success", "failure", "killed", "error": + return CIStatus(strings.ToLower(status)) + case "": // just in case status is not passed through + return CIStatusPending + default: + return CIStatusError + } +} + +// IsDone returns true if the build has a completed state. +func (status CIStatus) IsDone() bool { + switch status { + case CIStatusWaitingOnDeps, + CIStatusPending, + CIStatusRunning, + CIStatusBlocked: + return false + default: + return true + } +} diff --git a/types/enum/event_types.go b/types/enum/event_types.go deleted file mode 100644 index 18d0d171c..000000000 --- a/types/enum/event_types.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 Harness Inc. All rights reserved. 
-// Use of this source code is governed by the Polyform Free Trial License -// that can be found in the LICENSE.md file for this repository. - -// Enums for event types delivered to the event stream for the UI -package enum - -// EventType defines the kind of event -type EventType string - -const ( - ExecutionUpdated = "execution_updated" - ExecutionRunning = "execution_running" - ExecutionCompleted = "execution_completed" -) diff --git a/types/enum/sse.go b/types/enum/sse.go new file mode 100644 index 000000000..a1fefe168 --- /dev/null +++ b/types/enum/sse.go @@ -0,0 +1,18 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +// Enums for event types delivered to the event stream for the UI +package enum + +// SSEType defines the kind of server sent event +type SSEType string + +const ( + SSETypeExecutionUpdated = "execution_updated" + SSETypeExecutionRunning = "execution_running" + SSETypeExecutionCompleted = "execution_completed" + SSETypeExecutionCanceled = "execution_canceled" + + SSETypeRepositoryImportCompleted = "repository_import_completed" +) diff --git a/types/execution.go b/types/execution.go index 4e85879e9..c3b9bff61 100644 --- a/types/execution.go +++ b/types/execution.go @@ -4,15 +4,18 @@ package types +import "github.com/harness/gitness/types/enum" + // Execution represents an instance of a pipeline execution. 
type Execution struct { ID int64 `json:"-"` PipelineID int64 `json:"pipeline_id"` + CreatedBy int64 `json:"created_by"` RepoID int64 `json:"repo_id"` Trigger string `json:"trigger,omitempty"` Number int64 `json:"number"` Parent int64 `json:"parent,omitempty"` - Status string `json:"status"` + Status enum.CIStatus `json:"status"` Error string `json:"error,omitempty"` Event string `json:"event,omitempty"` Action string `json:"action,omitempty"` diff --git a/types/pipeline.go b/types/pipeline.go index c3256f28d..05f27823b 100644 --- a/types/pipeline.go +++ b/types/pipeline.go @@ -8,6 +8,8 @@ type Pipeline struct { ID int64 `db:"pipeline_id" json:"id"` Description string `db:"pipeline_description" json:"description"` UID string `db:"pipeline_uid" json:"uid"` + Disabled bool `db:"pipeline_disabled" json:"disabled"` + CreatedBy int64 `db:"pipeline_created_by" json:"created_by"` Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline RepoID int64 `db:"pipeline_repo_id" json:"repo_id"` DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"` diff --git a/types/repo.go b/types/repo.go index daf913e02..302e526e5 100644 --- a/types/repo.go +++ b/types/repo.go @@ -28,15 +28,13 @@ type Repository struct { ForkID int64 `db:"repo_fork_id" json:"fork_id"` PullReqSeq int64 `db:"repo_pullreq_seq" json:"-"` - // TODO: Check if we want to keep those values here NumForks int `db:"repo_num_forks" json:"num_forks"` NumPulls int `db:"repo_num_pulls" json:"num_pulls"` NumClosedPulls int `db:"repo_num_closed_pulls" json:"num_closed_pulls"` NumOpenPulls int `db:"repo_num_open_pulls" json:"num_open_pulls"` NumMergedPulls int `db:"repo_num_merged_pulls" json:"num_merged_pulls"` - Importing bool `db:"repo_importing" json:"importing"` - ImportingJobUID *string `db:"repo_importing_job_uid" json:"-"` + Importing bool `db:"repo_importing" json:"importing"` // git urls GitURL string `db:"-" json:"git_url"` diff --git a/types/secret.go b/types/secret.go 
index 1a7107db8..23c36cd30 100644 --- a/types/secret.go +++ b/types/secret.go @@ -8,6 +8,7 @@ type Secret struct { ID int64 `db:"secret_id" json:"id"` Description string `db:"secret_description" json:"description"` SpaceID int64 `db:"secret_space_id" json:"space_id"` + CreatedBy int64 `db:"secret_created_by" json:"created_by"` UID string `db:"secret_uid" json:"uid"` Data string `db:"secret_data" json:"-"` Created int64 `db:"secret_created" json:"created"` diff --git a/types/stage.go b/types/stage.go index de31f06e6..1bef444a5 100644 --- a/types/stage.go +++ b/types/stage.go @@ -4,6 +4,8 @@ package types +import "github.com/harness/gitness/types/enum" + type Stage struct { ID int64 `json:"-"` ExecutionID int64 `json:"execution_id"` @@ -12,7 +14,7 @@ type Stage struct { Name string `json:"name"` Kind string `json:"kind,omitempty"` Type string `json:"type,omitempty"` - Status string `json:"status"` + Status enum.CIStatus `json:"status"` Error string `json:"error,omitempty"` ErrIgnore bool `json:"errignore,omitempty"` ExitCode int `json:"exit_code"` diff --git a/types/step.go b/types/step.go index 9d9d14b43..19c2b8da1 100644 --- a/types/step.go +++ b/types/step.go @@ -4,20 +4,22 @@ package types +import "github.com/harness/gitness/types/enum" + type Step struct { - ID int64 `json:"-"` - StageID int64 `json:"-"` - Number int64 `json:"number"` - Name string `json:"name"` - Status string `json:"status"` - Error string `json:"error,omitempty"` - ErrIgnore bool `json:"errignore,omitempty"` - ExitCode int `json:"exit_code"` - Started int64 `json:"started,omitempty"` - Stopped int64 `json:"stopped,omitempty"` - Version int64 `json:"-" db:"step_version"` - DependsOn []string `json:"depends_on,omitempty"` - Image string `json:"image,omitempty"` - Detached bool `json:"detached"` - Schema string `json:"schema,omitempty"` + ID int64 `json:"-"` + StageID int64 `json:"-"` + Number int64 `json:"number"` + Name string `json:"name"` + Status enum.CIStatus `json:"status"` + Error string 
`json:"error,omitempty"` + ErrIgnore bool `json:"errignore,omitempty"` + ExitCode int `json:"exit_code"` + Started int64 `json:"started,omitempty"` + Stopped int64 `json:"stopped,omitempty"` + Version int64 `json:"-" db:"step_version"` + DependsOn []string `json:"depends_on,omitempty"` + Image string `json:"image,omitempty"` + Detached bool `json:"detached"` + Schema string `json:"schema,omitempty"` } diff --git a/types/trigger.go b/types/trigger.go index c6c2ac28c..51f49cf35 100644 --- a/types/trigger.go +++ b/types/trigger.go @@ -9,11 +9,12 @@ import "github.com/harness/gitness/types/enum" type Trigger struct { ID int64 `json:"id"` Description string `json:"description"` + Type string `json:"trigger_type"` PipelineID int64 `json:"pipeline_id"` Secret string `json:"-"` RepoID int64 `json:"repo_id"` CreatedBy int64 `json:"created_by"` - Enabled bool `json:"enabled"` + Disabled bool `json:"disabled"` Actions []enum.TriggerAction `json:"actions"` UID string `json:"uid"` Created int64 `json:"created"` diff --git a/web/.eslintrc.yml b/web/.eslintrc.yml index 0ce71a8f9..b9fa07735 100644 --- a/web/.eslintrc.yml +++ b/web/.eslintrc.yml @@ -101,10 +101,6 @@ rules: - lodash.* paths: - lodash - - name: yaml - importNames: - - stringify - message: 'Please use yamlStringify from @common/utils/YamlHelperMethods instead of this' overrides: - files: diff --git a/web/src/RouteDefinitions.ts b/web/src/RouteDefinitions.ts index 1a7000138..157d40288 100644 --- a/web/src/RouteDefinitions.ts +++ b/web/src/RouteDefinitions.ts @@ -47,6 +47,7 @@ export interface CODERoutes { toCODESpaceSettings: (args: Required>) => string toCODEPipelines: (args: Required>) => string toCODEPipelineEdit: (args: Required>) => string + toCODEPipelineSettings: (args: Required>) => string toCODESecrets: (args: Required>) => string toCODEGlobalSettings: () => string @@ -98,6 +99,7 @@ export const routes: CODERoutes = { toCODESpaceSettings: ({ space }) => `/settings/${space}`, toCODEPipelines: ({ repoPath }) => 
`/${repoPath}/pipelines`, toCODEPipelineEdit: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/edit`, + toCODEPipelineSettings: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/triggers`, toCODESecrets: ({ space }) => `/secrets/${space}`, toCODEGlobalSettings: () => '/settings', diff --git a/web/src/RouteDestinations.tsx b/web/src/RouteDestinations.tsx index 60ec8dd21..e6bb9b39c 100644 --- a/web/src/RouteDestinations.tsx +++ b/web/src/RouteDestinations.tsx @@ -33,6 +33,7 @@ import Secret from 'pages/Secret/Secret' import Search from 'pages/Search/Search' import AddUpdatePipeline from 'pages/AddUpdatePipeline/AddUpdatePipeline' import { useAppContext } from 'AppContext' +import PipelineSettings from 'components/PipelineSettings/PipelineSettings' export const RouteDestinations: React.FC = React.memo(function RouteDestinations() { const { getString } = useStrings() @@ -194,6 +195,14 @@ export const RouteDestinations: React.FC = React.memo(function RouteDestinations )} + {standalone && ( + + + + + + )} + {standalone && ( diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx index 51a63792f..4b89de954 100644 --- a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx +++ b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx @@ -36,7 +36,7 @@ const CloneCredentialDialog = (props: CloneCredentialDialogProps) => { }, [mutate, showError] ) - const tokenData = standalone ? false : hooks?.useGenerateToken?.(hash, currentUser.uid, flag) + const tokenData = standalone ? 
false : hooks?.useGenerateToken?.(hash, currentUser?.uid, flag) useEffect(() => { if (tokenData) { diff --git a/web/src/components/Editor/Editor.tsx b/web/src/components/Editor/Editor.tsx index 5c2b5ff59..02f8bfca0 100644 --- a/web/src/components/Editor/Editor.tsx +++ b/web/src/components/Editor/Editor.tsx @@ -47,7 +47,6 @@ export const Editor = React.memo(function CodeMirrorReactEditor({ onViewUpdate, darkTheme }: EditorProps) { - const contentRef = useRef(content) const view = useRef() const ref = useRef() const languageConfig = useMemo(() => new Compartment(), []) @@ -139,14 +138,5 @@ export const Editor = React.memo(function CodeMirrorReactEditor({ } }, [filename, forMarkdown, view, languageConfig, markdownLanguageSupport]) - useEffect(() => { - if (contentRef.current !== content) { - contentRef.current = content - viewRef?.current?.dispatch({ - changes: { from: 0, to: viewRef?.current?.state.doc.length, insert: content } - }) - } - }, [content, viewRef]) - return }) diff --git a/web/src/components/ExecutionPageHeader/ExecutionPageHeader.tsx b/web/src/components/ExecutionPageHeader/ExecutionPageHeader.tsx index d5a30db3e..57714c1c2 100644 --- a/web/src/components/ExecutionPageHeader/ExecutionPageHeader.tsx +++ b/web/src/components/ExecutionPageHeader/ExecutionPageHeader.tsx @@ -10,7 +10,7 @@ import { useGetSpaceParam } from 'hooks/useGetSpaceParam' import type { CODEProps } from 'RouteDefinitions' import type { GitInfoProps } from 'utils/GitUtils' import { ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus' -import { getStatus } from 'utils/PipelineUtils' +import { getStatus } from 'utils/ExecutionUtils' import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' import { timeDistance } from 'utils/Utils' import css from './ExecutionPageHeader.module.scss' diff --git a/web/src/components/ExecutionStageList/ExecutionStageList.tsx b/web/src/components/ExecutionStageList/ExecutionStageList.tsx index 3a5098ea3..5e0058a34 100644 --- 
a/web/src/components/ExecutionStageList/ExecutionStageList.tsx +++ b/web/src/components/ExecutionStageList/ExecutionStageList.tsx @@ -3,7 +3,7 @@ import { Container, FlexExpander, Layout, Text } from '@harnessio/uicore' import cx from 'classnames' import type { TypesStage } from 'services/code' import { ExecutionState, ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus' -import { getStatus } from 'utils/PipelineUtils' +import { getStatus } from 'utils/ExecutionUtils' import { timeDistance } from 'utils/Utils' import css from './ExecutionStageList.module.scss' diff --git a/web/src/components/NewRepoModalButton/ImportForm/ImportForm.tsx b/web/src/components/NewRepoModalButton/ImportForm/ImportForm.tsx new file mode 100644 index 000000000..ab7b9fc42 --- /dev/null +++ b/web/src/components/NewRepoModalButton/ImportForm/ImportForm.tsx @@ -0,0 +1,189 @@ +import React, { useState } from 'react' +import { Intent } from '@blueprintjs/core' +import * as yup from 'yup' +import { Button, Container, Layout, FlexExpander, Formik, FormikForm, FormInput, Text } from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import { FontVariation } from '@harnessio/design-system' +import { useStrings } from 'framework/strings' +import { REGEX_VALID_REPO_NAME } from 'utils/Utils' +import { ImportFormData, RepoVisibility, parseUrl } from 'utils/GitUtils' +import css from '../NewRepoModalButton.module.scss' + +interface ImportFormProps { + handleSubmit: (data: ImportFormData) => void + loading: boolean // eslint-disable-next-line @typescript-eslint/no-explicit-any + hideModal: any +} + +const ImportForm = (props: ImportFormProps) => { + const { handleSubmit, loading, hideModal } = props + const { getString } = useStrings() + const [auth, setAuth] = useState(false) + + // eslint-disable-next-line no-control-regex + const MATCH_REPOURL_REGEX = /^(https?:\/\/(?:www\.)?(github|gitlab)\.com\/([^/]+\/[^/]+))/ + + const formInitialValues: ImportFormData = { + repoUrl: '', 
+ username: '', + password: '', + name: '', + description: '', + isPublic: RepoVisibility.PRIVATE + } + return ( + + {formik => { + return ( + + { + const target = event.target as HTMLInputElement + formik.setFieldValue('repoUrl', target.value) + if (target.value) { + const provider = parseUrl(target.value) + if (provider?.fullRepo) { + formik.setFieldValue('name', provider.repoName ? provider.repoName : provider?.fullRepo) + formik.validateField('repoUrl') + } + } + }} + /> + { + setAuth(!auth) + }} + /> + + {auth ? ( + <> + + + + ) : null} +
+ { + formik.validateField('repoUrl') + }} + /> + + +
+ + + + + + + + {getString('public')} + + {getString('createRepoModal.publicLabel')} + + + + + + ), + value: RepoVisibility.PUBLIC + }, + { + label: ( + + + + + + {getString('private')} + + {getString('createRepoModal.privateLabel')} + + + + + + ), + value: RepoVisibility.PRIVATE + } + ]} + /> +
+ + + + + + + + ) +} + +const TriggersContent = () => { + return
Triggers
+} + +const PipelineSettings = () => { + const { getString } = useStrings() + + const { pipeline } = useParams() + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + + const { + data: pipelineData, + error: pipelineError, + loading: pipelineLoading + } = useGet({ + path: `/api/v1/repos/${repoMetadata?.path}/+/pipelines/${pipeline}`, + lazy: !repoMetadata + }) + + const [selectedTab, setSelectedTab] = React.useState(TabOptions.SETTINGS) + + return ( + + + + + {selectedTab === TabOptions.SETTINGS && ( + + )} + {selectedTab === TabOptions.TRIGGERS && } + + + ) +} + +export default PipelineSettings diff --git a/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss new file mode 100644 index 000000000..23aa7cd55 --- /dev/null +++ b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss @@ -0,0 +1,31 @@ +.pageHeader { + height: auto !important; + flex-direction: column !important; + align-items: flex-start !important; + gap: var(--spacing-small) !important; + padding-bottom: 0 !important; +} + +.breadcrumb { + align-items: center; + + a { + font-size: var(--font-size-small); + color: var(--primary-7); + } +} + +.tabs { + display: flex !important; + gap: 1rem !important; + .tab { + padding: 0.5rem 1rem !important; + cursor: pointer !important; + color: var(--grey-700) !important; + &.active { + color: var(--grey-900) !important; + font-weight: 600 !important; + border-bottom: 2px solid var(--primary-7) !important; // example of an active tab indicator + } + } +} diff --git a/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss.d.ts b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss.d.ts new file mode 100644 index 000000000..a952ffaf5 --- /dev/null +++ 
b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss.d.ts @@ -0,0 +1,7 @@ +/* eslint-disable */ +// This is an auto-generated file +export declare const active: string +export declare const breadcrumb: string +export declare const pageHeader: string +export declare const tab: string +export declare const tabs: string diff --git a/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.tsx b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.tsx new file mode 100644 index 000000000..fe055d0b6 --- /dev/null +++ b/web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.tsx @@ -0,0 +1,82 @@ +import React, { Fragment } from 'react' +import { Layout, PageHeader, Container } from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import { Color } from '@harnessio/design-system' +import { Link, useParams } from 'react-router-dom' +import { useStrings } from 'framework/strings' +import { useAppContext } from 'AppContext' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import type { CODEProps } from 'RouteDefinitions' +import type { GitInfoProps } from 'utils/GitUtils' +import { TabOptions } from 'components/PipelineSettings/PipelineSettings' +import css from './PipelineSettingsPageHeader.module.scss' + +interface BreadcrumbLink { + label: string + url: string +} + +interface PipelineSettingsPageHeaderProps extends Optional, 'repoMetadata'> { + title: string | JSX.Element + dataTooltipId: string + extraBreadcrumbLinks?: BreadcrumbLink[] + selectedTab: TabOptions + setSelectedTab: (tab: TabOptions) => void +} + +const PipelineSettingsPageHeader = ({ + repoMetadata, + title, + extraBreadcrumbLinks = [], + selectedTab, + setSelectedTab +}: PipelineSettingsPageHeaderProps) => { + const { gitRef } = useParams() + const { getString } = useStrings() + const space = useGetSpaceParam() + const { routes } = useAppContext() + + if (!repoMetadata) { + return null + } + + 
return ( + + {getString('repositories')} + + + {repoMetadata.uid} + + {extraBreadcrumbLinks.map(link => ( + + + {link.label} + + ))} + + } + content={ + + {Object.values(TabOptions).map(tabOption => ( +
setSelectedTab(tabOption)}> + {tabOption} +
+ ))} +
+ } + /> + ) +} + +export default PipelineSettingsPageHeader diff --git a/web/src/components/PluginsPanel/PluginsPanel.tsx b/web/src/components/PluginsPanel/PluginsPanel.tsx index 82002b53a..2dd15dfc7 100644 --- a/web/src/components/PluginsPanel/PluginsPanel.tsx +++ b/web/src/components/PluginsPanel/PluginsPanel.tsx @@ -207,7 +207,7 @@ export const PluginsPanel = ({ version = YamlVersion.V0, onPluginAddUpdate }: Pl case PluginCategory.Harness: constructedPayload = version === YamlVersion.V1 - ? { type: 'script', spec: constructedPayload } + ? { type: 'script', spec: { run: get(constructedPayload, 'script', '') } } : { name: 'run step', commands: [get(constructedPayload, 'script', '')] } } onPluginAddUpdate?.(isUpdate, constructedPayload) diff --git a/web/src/components/SpaceSelector/SpaceSelector.tsx b/web/src/components/SpaceSelector/SpaceSelector.tsx index 8dc605e8b..99b5e08a1 100644 --- a/web/src/components/SpaceSelector/SpaceSelector.tsx +++ b/web/src/components/SpaceSelector/SpaceSelector.tsx @@ -92,6 +92,11 @@ export const SpaceSelector: React.FC = ({ onSelect }) => { variation={ButtonVariation.PRIMARY} icon="plus" onRefetch={voidFn(refetch)} + onSubmit={spaceData => { + history.push(routes.toCODERepositories({ space: spaceData.path as string })) + setOpened(false) + }} + fromSpace={true} /> ) @@ -213,6 +218,10 @@ export const SpaceSelector: React.FC = ({ onSelect }) => { variation={ButtonVariation.PRIMARY} icon="plus" onRefetch={voidFn(refetch)} + onSubmit={spaceData => { + history.push(routes.toCODERepositories({ space: spaceData.path as string })) + }} + fromSpace={true} /> } message={ {getString('emptySpaceText')}} diff --git a/web/src/components/UpdateSecretModal/UpdateSecretModal.tsx b/web/src/components/UpdateSecretModal/UpdateSecretModal.tsx new file mode 100644 index 000000000..4b8b0f9c4 --- /dev/null +++ b/web/src/components/UpdateSecretModal/UpdateSecretModal.tsx @@ -0,0 +1,156 @@ +import React, { useRef, useState } from 'react' +import * as yup 
from 'yup' +import { useMutate } from 'restful-react' +import { FontVariation, Intent } from '@harnessio/design-system' +import { + Button, + Dialog, + Layout, + Heading, + Container, + Formik, + FormikForm, + FormInput, + FlexExpander, + useToaster, + StringSubstitute +} from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import { useStrings } from 'framework/strings' +import { useModalHook } from 'hooks/useModalHook' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import type { OpenapiUpdateSecretRequest, TypesSecret } from 'services/code' +import type { SecretFormData } from 'components/NewSecretModalButton/NewSecretModalButton' +import { getErrorMessage } from 'utils/Utils' + +const useUpdateSecretModal = () => { + const { getString } = useStrings() + const space = useGetSpaceParam() + const { showError, showSuccess } = useToaster() + const [secret, setSecret] = useState() + const postUpdate = useRef() + + const { mutate: updateSecret, loading } = useMutate({ + verb: 'PATCH', + path: `/api/v1/secrets/${space}/${secret?.uid}/+` + }) + + const handleSubmit = async (formData: SecretFormData) => { + try { + const payload: OpenapiUpdateSecretRequest = { + data: formData.value, + description: formData.description, + uid: formData.name + } + await updateSecret(payload) + hideModal() + showSuccess( + + ) + postUpdate.current?.() + } catch (exception) { + showError(getErrorMessage(exception), 0, getString('secrets.failedToUpdateSecret')) + } + } + + const [openModal, hideModal] = useModalHook(() => { + const onClose = () => { + hideModal() + } + + return ( + + + + {getString('secrets.updateSecret')} + + + + + + + + + + + + ) + }, [secret]) + + return { + openModal: ({ + secretToUpdate, + openSecretUpdate + }: { + secretToUpdate: TypesSecret + openSecretUpdate: () => Promise + }) => { + setSecret(secretToUpdate) + postUpdate.current = openSecretUpdate + openModal() + } + } +} + +export default useUpdateSecretModal diff --git 
a/web/src/framework/strings/stringTypes.ts b/web/src/framework/strings/stringTypes.ts index e465d5136..128b0cd2b 100644 --- a/web/src/framework/strings/stringTypes.ts +++ b/web/src/framework/strings/stringTypes.ts @@ -105,6 +105,7 @@ export interface StringsMap { createBranchFromBranch: string createBranchFromTag: string createFile: string + createNewRepo: string createNewToken: string createNewUser: string createPullRequest: string @@ -193,6 +194,8 @@ export interface StringsMap { failedToCreateSpace: string failedToDeleteBranch: string failedToDeleteWebhook: string + failedToFetchFileContent: string + failedToImportSpace: string failedToSavePipeline: string fileDeleted: string fileTooLarge: string @@ -214,6 +217,40 @@ export interface StringsMap { history: string 'homepage.firstStep': string 'homepage.welcomeText': string + importGitRepo: string + importProgress: string + 'importRepo.failedToImportRepo': string + 'importRepo.passToken': string + 'importRepo.passwordPlaceholder': string + 'importRepo.passwordReq': string + 'importRepo.reqAuth': string + 'importRepo.required': string + 'importRepo.spaceNameReq': string + 'importRepo.title': string + 'importRepo.url': string + 'importRepo.urlPlaceholder': string + 'importRepo.userPlaceholder': string + 'importRepo.usernameReq': string + 'importRepo.validation': string + 'importSpace.authorization': string + 'importSpace.content': string + 'importSpace.createNewSpace': string + 'importSpace.descPlaceholder': string + 'importSpace.description': string + 'importSpace.details': string + 'importSpace.gitProvider': string + 'importSpace.githubOrg': string + 'importSpace.gitlabGroup': string + 'importSpace.importLabel': string + 'importSpace.invalidUrl': string + 'importSpace.next': string + 'importSpace.orgNamePlaceholder': string + 'importSpace.orgRequired': string + 'importSpace.organizationName': string + 'importSpace.providerRequired': string + 'importSpace.spaceName': string + 'importSpace.spaceNameRequired': string 
+ 'importSpace.title': string in: string inactiveBranches: string isRequired: string @@ -326,12 +363,18 @@ export interface StringsMap { pending: string 'pipelines.createNewPipeline': string 'pipelines.created': string + 'pipelines.deletePipelineButton': string + 'pipelines.deletePipelineConfirm': string + 'pipelines.deletePipelineError': string + 'pipelines.deletePipelineSuccess': string + 'pipelines.deletePipelineWarning': string 'pipelines.editPipeline': string 'pipelines.enterPipelineName': string 'pipelines.enterYAMLPath': string 'pipelines.executionCouldNotStart': string 'pipelines.executionStarted': string 'pipelines.failedToCreatePipeline': string + 'pipelines.failedToUpdatePipeline': string 'pipelines.lastExecution': string 'pipelines.name': string 'pipelines.newPipelineButton': string @@ -339,12 +382,13 @@ export interface StringsMap { 'pipelines.run': string 'pipelines.saveAndRun': string 'pipelines.time': string - poweredByAI: string + 'pipelines.updatePipelineSuccess': string 'pipelines.updated': string 'pipelines.yamlPath': string 'plugins.addAPlugin': string 'plugins.stepLabel': string 'plugins.title': string + poweredByAI: string 'pr.ableToMerge': string 'pr.addDescription': string 'pr.authorCommentedPR': string @@ -495,10 +539,13 @@ export interface StringsMap { 'secrets.enterSecretName': string 'secrets.failedToCreate': string 'secrets.failedToDeleteSecret': string + 'secrets.failedToUpdateSecret': string 'secrets.name': string 'secrets.newSecretButton': string 'secrets.noData': string 'secrets.secretDeleted': string + 'secrets.secretUpdated': string + 'secrets.updateSecret': string 'secrets.value': string selectBranchPlaceHolder: string selectRange: string diff --git a/web/src/hooks/usePipelineEventStream.tsx b/web/src/hooks/useSpaceSSE.tsx similarity index 62% rename from web/src/hooks/usePipelineEventStream.tsx rename to web/src/hooks/useSpaceSSE.tsx index 6e36d088d..ebfbc3dc7 100644 --- a/web/src/hooks/usePipelineEventStream.tsx +++ 
b/web/src/hooks/useSpaceSSE.tsx @@ -1,25 +1,26 @@ import { useEffect, useRef } from 'react' -type UsePipelineEventStreamProps = { +type UseSpaceSSEProps = { space: string - onEvent: (data: any) => void + events: string[] + onEvent: (data: any, type: string) => void onError?: (event: Event) => void shouldRun?: boolean } -const usePipelineEventStream = ({ space, onEvent, onError, shouldRun = true }: UsePipelineEventStreamProps) => { +const useSpaceSSE = ({ space, events, onEvent, onError, shouldRun = true }: UseSpaceSSEProps) => { //TODO - this is not working right - need to get to the bottom of too many streams being opened and closed... can miss events! const eventSourceRef = useRef(null) useEffect(() => { // Conditionally establish the event stream - don't want to open on a finished execution - if (shouldRun) { + if (shouldRun && events.length > 0) { if (!eventSourceRef.current) { - eventSourceRef.current = new EventSource(`/api/v1/spaces/${space}/stream`) + eventSourceRef.current = new EventSource(`/api/v1/spaces/${space}/+/events`) const handleMessage = (event: MessageEvent) => { const data = JSON.parse(event.data) - onEvent(data) + onEvent(data, event.type) } const handleError = (event: Event) => { @@ -27,12 +28,21 @@ const usePipelineEventStream = ({ space, onEvent, onError, shouldRun = true }: U eventSourceRef?.current?.close() } - eventSourceRef.current.addEventListener('message', handleMessage) + // always register error eventSourceRef.current.addEventListener('error', handleError) + // register requested events + for (const i in events) { + const eventType = events[i] + eventSourceRef.current.addEventListener(eventType, handleMessage) + } + return () => { - eventSourceRef.current?.removeEventListener('message', handleMessage) eventSourceRef.current?.removeEventListener('error', handleError) + for (const i in events) { + const eventType = events[i] + eventSourceRef.current?.removeEventListener(eventType, handleMessage) + } eventSourceRef.current?.close() 
eventSourceRef.current = null } @@ -44,7 +54,7 @@ const usePipelineEventStream = ({ space, onEvent, onError, shouldRun = true }: U eventSourceRef.current = null } } - }, [space, shouldRun, onEvent, onError]) + }, [space, events, shouldRun, onEvent, onError]) } -export default usePipelineEventStream +export default useSpaceSSE diff --git a/web/src/i18n/strings.en.yaml b/web/src/i18n/strings.en.yaml index 28d463d49..69424e0e2 100644 --- a/web/src/i18n/strings.en.yaml +++ b/web/src/i18n/strings.en.yaml @@ -589,6 +589,7 @@ spaceMemberships: memberUpdated: Member updated successfully. memberAdded: Member added successfully. failedToCreateSpace: Failed to create Space. Please try again. +failedToImportSpace: Failed to import Space. Please try again. failedToCreatePipeline: Failed to create Pipeline. Please try again. failedToSavePipeline: Failed to save Pipeline. Please try again. enterName: Enter the name @@ -627,7 +628,7 @@ repoDelete: deleteToastSuccess: Repository deleted successfully deleteConfirmButton2: Delete this repository pipelines: - noData: There are no pipelines :( + noData: There are no pipelines newPipelineButton: New Pipeline name: Pipeline Name createNewPipeline: Create New Pipeline @@ -644,8 +645,15 @@ pipelines: updated: Pipeline updated successfully executionStarted: Pipeline execution started successfully executionCouldNotStart: Failure while starting Pipeline execution + deletePipelineWarning: This will permanently delete the {{pipeline}} pipeline and all executions. + deletePipelineButton: Delete pipeline + deletePipelineConfirm: Are you sure you want to delete the pipeline {{pipeline}}? You can't undo this action. + deletePipelineSuccess: Pipeline {{pipeline}} deleted. + deletePipelineError: Failed to delete Pipeline. Please try again. + updatePipelineSuccess: Pipeline {{pipeline}} updated. + failedToUpdatePipeline: Failed to update Pipeline. Please try again. 
executions: - noData: There are no executions :( + noData: There are no executions newExecutionButton: Run Pipeline name: Execution Name description: Description @@ -657,7 +665,7 @@ executions: selectRange: Shift-click to select a range allCommits: All Commits secrets: - noData: There are no secrets :( + noData: There are no secrets newSecretButton: New Secret name: Secret Name failedToCreate: Failed to create Secret. Please try again. @@ -666,9 +674,12 @@ secrets: createSecret: Create Secret createSuccess: Secret created successfully secretDeleted: Secret {uid} deleted. + secretUpdated: Secret {uid} updated. deleteSecretConfirm: Are you sure you want to delete secret {{uid}}? You can't undo this action. failedToDeleteSecret: Failed to delete Secret. Please try again. - deleteSecret: Delete Secrets + failedToUpdateSecret: Failed to update Secret. Please try again. + deleteSecret: Delete secret + updateSecret: Update secret userUpdateSuccess: 'User updated successfully' viewFile: View File searchResult: 'Search Result {count}' @@ -676,8 +687,46 @@ aiSearch: AI Search codeSearch: Code Search startSearching: Begin search by describing what you are looking for. poweredByAI: Unlock the power of AI with Semantic Code search. Try phrases like "Locate the code for authentication". +failedToFetchFileContent: 'ERROR: Failed to fetch file content.' run: Run plugins: title: Plugins addAPlugin: Add a {{category}} plugin stepLabel: step +createNewRepo: Create New repository +importGitRepo: Import Git Repository +importRepo: + title: Import Repository + url: Repository URL + urlPlaceholder: Enter the Repository URL + reqAuth: Requires Authorization + userPlaceholder: Enter Username + passwordPlaceholder: Enter Password + passToken: Password/Token + failedToImportRepo: Failed to import repository. Please try again. 
+ validation: Invalid GitHub or GitLab URL + required: Repository URL is required + spaceNameReq: Enter a name for the new space + usernameReq: Username is required + passwordReq: Password is required +importSpace: + title: Import Space + createNewSpace: Create New Space + authorization: Authorization + content: Import a GitLab Group or a GitHub Org to a new Space in Gitness. Entities at the top level will be imported to the space. + next: Next step + details: Details of target to import + organizationName: Organization Name + orgNamePlaceholder: Enter the org name here + spaceName: Space Name + description: Description (optional) + descPlaceholder: Enter the description + importLabel: What to import + providerRequired: Git Provider is required + orgRequired: Organization name is required + spaceNameRequired: Space name is required + gitProvider: Git Provider + invalidUrl: Invalid GitHub or GitLab URL + githubOrg: GitHub Organization Name + gitlabGroup: GitLab Group Name +importProgress: 'Import in progress...' 
diff --git a/web/src/layouts/menu/DefaultMenu.tsx b/web/src/layouts/menu/DefaultMenu.tsx index 4ecca3f79..91eedac6f 100644 --- a/web/src/layouts/menu/DefaultMenu.tsx +++ b/web/src/layouts/menu/DefaultMenu.tsx @@ -126,11 +126,12 @@ export const DefaultMenu: React.FC = () => { })} /> )} + diff --git a/web/src/pages/AddUpdatePipeline/AddUpdatePipeline.tsx b/web/src/pages/AddUpdatePipeline/AddUpdatePipeline.tsx index 149d15418..c8a3bf695 100644 --- a/web/src/pages/AddUpdatePipeline/AddUpdatePipeline.tsx +++ b/web/src/pages/AddUpdatePipeline/AddUpdatePipeline.tsx @@ -31,27 +31,33 @@ import { getErrorMessage } from 'utils/Utils' import { decodeGitContent } from 'utils/GitUtils' import pipelineSchemaV1 from './schema/pipeline-schema-v1.json' import pipelineSchemaV0 from './schema/pipeline-schema-v0.json' -import { YamlVersion } from './Constants' +import { DRONE_CONFIG_YAML_FILE_SUFFIXES, YamlVersion } from './Constants' import css from './AddUpdatePipeline.module.scss' const StarterPipelineV1: Record = { version: 1, - stages: [ - { - type: 'ci', - spec: { - steps: [ - { - type: 'script', - spec: { - run: 'echo hello world' + kind: 'pipeline', + spec: { + stages: [ + { + name: 'build', + type: 'ci', + spec: { + steps: [ + { + name: 'build', + type: 'script', + spec: { + image: 'golang', + run: 'echo "hello world"' + } } - } - ] + ] + } } - } - ] + ] + } } const StarterPipelineV0: Record = { @@ -79,15 +85,13 @@ interface PipelineSaveAndRunOption { } const AddUpdatePipeline = (): JSX.Element => { - const version = YamlVersion.V0 const { routes } = useAppContext() const { getString } = useStrings() const { pipeline } = useParams() const { repoMetadata } = useGetRepositoryMetadata() const { showError, showSuccess, clear: clearToaster } = useToaster() - const [pipelineAsObj, setPipelineAsObj] = useState>( - version === YamlVersion.V0 ? 
StarterPipelineV0 : StarterPipelineV1 - ) + const [yamlVersion, setYAMLVersion] = useState() + const [pipelineAsObj, setPipelineAsObj] = useState>({}) const [pipelineAsYAML, setPipelineAsYaml] = useState('') const { openModal: openRunPipelineModal } = useRunPipelineModal() const repoPath = useMemo(() => repoMetadata?.path || '', [repoMetadata]) @@ -139,6 +143,15 @@ const AddUpdatePipeline = (): JSX.Element => { [pipelineYAMLFileContent?.content] ) + // set YAML version for Pipeline setup + useEffect(() => { + setYAMLVersion( + DRONE_CONFIG_YAML_FILE_SUFFIXES.find((suffix: string) => pipelineData?.config_path?.endsWith(suffix)) + ? YamlVersion.V0 + : YamlVersion.V1 + ) + }, [pipelineData]) + // check if file already exists and has some content useEffect(() => { setIsExistingPipeline(!isEmpty(originalPipelineYAMLFileContent) && !isUndefined(originalPipelineYAMLFileContent)) @@ -151,12 +164,12 @@ const AddUpdatePipeline = (): JSX.Element => { } else { // load with starter pipeline try { - setPipelineAsYaml(stringify(pipelineAsObj)) + setPipelineAsYaml(stringify(yamlVersion === YamlVersion.V1 ? StarterPipelineV1 : StarterPipelineV0)) } catch (ex) { // ignore exception } } - }, [isExistingPipeline, originalPipelineYAMLFileContent, pipelineAsObj]) + }, [yamlVersion, isExistingPipeline, originalPipelineYAMLFileContent, pipelineAsObj]) // find if editor content was modified useEffect(() => { @@ -206,7 +219,7 @@ const AddUpdatePipeline = (): JSX.Element => { const updatePipeline = (payload: Record): Record => { const pipelineAsObjClone = { ...pipelineAsObj } - const stepInsertPath = version === YamlVersion.V0 ? 'steps' : 'stages.0.spec.steps' + const stepInsertPath = yamlVersion === YamlVersion.V1 ? 
'spec.stages.0.spec.steps' : 'steps' let existingSteps: [unknown] = get(pipelineAsObjClone, stepInsertPath, []) if (existingSteps.length > 0) { existingSteps.push(payload) @@ -309,13 +322,13 @@ const AddUpdatePipeline = (): JSX.Element => { setPipelineAsYaml(value)} /> - + diff --git a/web/src/pages/AddUpdatePipeline/Constants.ts b/web/src/pages/AddUpdatePipeline/Constants.ts index 818dba710..ed17db88d 100644 --- a/web/src/pages/AddUpdatePipeline/Constants.ts +++ b/web/src/pages/AddUpdatePipeline/Constants.ts @@ -5,3 +5,5 @@ export enum YamlVersion { export const DEFAULT_YAML_PATH_PREFIX = '.harness/' export const DEFAULT_YAML_PATH_SUFFIX = '.yaml' + +export const DRONE_CONFIG_YAML_FILE_SUFFIXES = ['.drone.yml', '.drone.yaml'] diff --git a/web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v1.json b/web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v1.json index 5a94510c4..381bd8121 100644 --- a/web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v1.json +++ b/web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v1.json @@ -1,18 +1,142 @@ { "definitions": { - "Pipeline": { - "title": "Pipeline", - "description": "Pipeline defines the pipeline execution.", + "Config": { + "title": "Config", + "description": "Config defines a resource configuration.", "type": "object", "properties": { "version": { "description": "Version defines the schema version.", "type": ["string", "number"] }, - "name": { - "description": "Pipeline provides the pipeline name.", - "type": "string" + "kind": { + "type": "string", + "description": "Kind defines the schema kind.", + "enum": ["pipeline", "plugin", "template"] }, + "type": { + "type": "string", + "description": "Type defines the schema type.", + "enum": ["pipeline", "stage", "step"] + }, + "name": { + "type": "string", + "description": "Name defines an optional resource name." 
+ } + }, + "required": ["kind"], + "oneOf": [ + { + "allOf": [ + { + "properties": { + "kind": { + "const": "pipeline" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/Pipeline" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "kind": { + "const": "template" + }, + "type": { + "const": "stage" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/TemplateStage" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "kind": { + "const": "template" + }, + "type": { + "const": "step" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/TemplateStep" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "kind": { + "const": "plugin" + }, + "type": { + "const": "step" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/PluginStep" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "kind": { + "const": "plugin" + }, + "type": { + "const": "stage" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/PluginStage" + } + } + } + ] + } + ], + "x-go-skip": true, + "x-file": "config.yaml" + }, + "Pipeline": { + "title": "Pipeline", + "description": "Pipeline defines the pipeline execution.", + "type": "object", + "properties": { "stages": { "type": "array", "description": "Stages defines a list of pipeline stages.", @@ -32,7 +156,7 @@ "description": "Options defines global configuration options." 
} }, - "required": ["version", "stages"], + "required": ["stages"], "examples": [ { "version": 1, @@ -105,8 +229,8 @@ "description": "The stage conditional logic.", "$ref": "#/definitions/When" }, - "on": { - "$ref": "#/definitions/On" + "failure": { + "$ref": "#/definitions/FailureList" } }, "oneOf": [ @@ -435,47 +559,26 @@ "x-go-skip": true, "x-file": "when_expr.yaml" }, - "On": { - "title": "On", - "type": "object", - "properties": { - "failure": { - "oneOf": [ - { - "$ref": "#/definitions/Failure" - }, - { - "type": "array", - "items": { - "$ref": "#/definitions/Failure" - } - } - ] + "FailureList": { + "title": "FailureList", + "oneOf": [ + { + "$ref": "#/definitions/Failure" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/Failure" + } } - }, + ], "x-go-skip": true, - "x-file": "on.yaml" + "x-file": "failure_list.yaml" }, "Failure": { "title": "Failure", "type": "object", - "description": "Failure defines a failure strategy.", "properties": { - "type": { - "description": "Type defines the failure strategy type.", - "type": "string", - "enum": [ - "abort", - "ignore", - "manual-intervention", - "retry", - "success", - "stage-rollback", - "pipeline-rollback", - "retry-step-group", - "fail" - ] - }, "errors": { "description": "Errors specifies the types of errors.", "type": "array", @@ -497,9 +600,125 @@ "user-mark-fail" ] } + }, + "action": { + "$ref": "#/definitions/FailureAction" + } + }, + "x-file": "failure.yaml" + }, + "FailureAction": { + "title": "FailureAction", + "type": "object", + "description": "Failure defines a failure strategy.", + "properties": { + "type": { + "description": "Type defines the failure strategy type.", + "type": "string", + "enum": [ + "abort", + "fail", + "ignore", + "manual-intervention", + "pipeline-rollback", + "retry", + "retry-step-group", + "stage-rollback", + "success" + ] } }, "oneOf": [ + { + "allOf": [ + { + "properties": { + "type": { + "const": "success" + } + } + }, + { + "properties": { + "spec": { 
+ "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "fail" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "retry-step-group" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "stage-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "pipeline-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, { "allOf": [ { @@ -573,7 +792,13 @@ ] } ], - "x-file": "failure.yaml" + "x-file": "failure_action.yaml" + }, + "EmptySpec": { + "title": "EmptySpec", + "type": "object", + "properties": {}, + "x-file": "empty_spec.yaml" }, "Abort": { "title": "Abort", @@ -611,18 +836,169 @@ } ] }, - "on": { - "$ref": "#/definitions/On" + "failure": { + "$ref": "#/definitions/RetryFailure" } }, "type": "object", "x-file": "failure_retry.yaml" }, + "RetryFailure": { + "title": "RetryFailure", + "type": "object", + "properties": { + "action": { + "$ref": "#/definitions/RetryFailureAction" + } + }, + "x-file": "retry_failure.yaml" + }, + "RetryFailureAction": { + "title": "RetryFailureAction", + "type": "object", + "description": "Failure action on failure for all retries", + "properties": { + "type": { + "description": "Type defines the failure strategy type.", + "type": "string", + "enum": ["abort", "fail", "ignore", "manual-intervention", "pipeline-rollback", "stage-rollback", "success"] + } + }, + "oneOf": [ + { + "allOf": [ + { + "properties": { + "type": { + "const": "success" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + 
"properties": { + "type": { + "const": "fail" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "stage-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "pipeline-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "abort" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/Abort" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "ignore" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/Ignore" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "manual-intervention" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/ManualIntervention" + } + } + } + ] + } + ], + "x-file": "retry_failure_action.yaml" + }, "ManualIntervention": { "title": "ManualIntervention", "properties": { - "on": { - "$ref": "#/definitions/On" + "timeout_action": { + "$ref": "#/definitions/TimeoutAction" }, "timeout": { "format": "duration", @@ -633,6 +1009,129 @@ "type": "object", "x-file": "failure_manual.yaml" }, + "TimeoutAction": { + "title": "TimeoutAction", + "type": "object", + "description": "Manual intervention for timeout failure action", + "properties": { + "type": { + "description": "Type defines the failure strategy type.", + "type": "string", + "enum": ["abort", "ignore", "success", "stage-rollback", "pipeline-rollback", "fail"] + } + }, + "oneOf": [ + { + "allOf": [ + { + "properties": { + "type": { + "const": "success" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "fail" + } + 
} + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "stage-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "pipeline-rollback" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/EmptySpec" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "abort" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/Abort" + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "type": { + "const": "ignore" + } + } + }, + { + "properties": { + "spec": { + "$ref": "#/definitions/Ignore" + } + } + } + ] + } + ], + "x-file": "timeout_action.yaml" + }, "StageCI": { "properties": { "cache": { @@ -1080,8 +1579,8 @@ "description": "The stage conditional logic.", "$ref": "#/definitions/When" }, - "on": { - "$ref": "#/definitions/On" + "failure": { + "$ref": "#/definitions/FailureList" } }, "oneOf": [ @@ -2138,6 +2637,10 @@ }, "description": "Enum defines a list of accepted input values.", "x-go-type": "[]string" + }, + "mask": { + "type": "boolean", + "description": "Mask indicates the input should be masked." } }, "x-file": "input.yaml" @@ -2362,11 +2865,115 @@ } }, "x-file": "status.yaml" + }, + "TemplateStage": { + "title": "TemplateStage", + "description": "TemplateStage defines a stage-level template", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The template name." + }, + "description": { + "type": "string", + "description": "The template description." 
+ }, + "stage": { + "$ref": "#/definitions/Stage" + }, + "inputs": { + "type": "object", + "description": "Inputs defines the template input parameters.", + "additionalProperties": { + "$ref": "#/definitions/Input" + } + } + }, + "x-file": "template_stage.yaml" + }, + "TemplateStep": { + "title": "TemplateStep", + "description": "TemplateStep defines a step-level template", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The template name." + }, + "description": { + "type": "string", + "description": "The template description." + }, + "step": { + "$ref": "#/definitions/Step" + }, + "inputs": { + "type": "object", + "description": "Inputs defines the template input parameters.", + "additionalProperties": { + "$ref": "#/definitions/Input" + } + } + }, + "x-file": "template_step.yaml" + }, + "PluginStep": { + "title": "PluginStep", + "description": "PluginStep defines a step-level plugin", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The plugin name." + }, + "description": { + "type": "string", + "description": "The plugin description." + }, + "step": { + "$ref": "#/definitions/Step" + }, + "inputs": { + "type": "object", + "description": "Inputs defines the plugin input parameters.", + "additionalProperties": { + "$ref": "#/definitions/Input" + } + } + }, + "x-file": "plugin_step.yaml" + }, + "PluginStage": { + "title": "PluginStage", + "description": "PluginStage defines a stage-level plugin", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The plugin name." + }, + "description": { + "type": "string", + "description": "The plugin description." 
+ }, + "stage": { + "$ref": "#/definitions/Stage" + }, + "inputs": { + "type": "object", + "description": "Inputs defines the plugin input parameters.", + "additionalProperties": { + "$ref": "#/definitions/Input" + } + } + }, + "x-file": "plugin_stage.yaml" } }, "oneOf": [ { - "$ref": "#/definitions/Pipeline" + "$ref": "#/definitions/Config" } ] } diff --git a/web/src/pages/Execution/Execution.tsx b/web/src/pages/Execution/Execution.tsx index 68efba320..0f509639a 100644 --- a/web/src/pages/Execution/Execution.tsx +++ b/web/src/pages/Execution/Execution.tsx @@ -13,7 +13,7 @@ import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' import { Split } from 'components/Split/Split' import { ExecutionPageHeader } from 'components/ExecutionPageHeader/ExecutionPageHeader' -import usePipelineEventStream from 'hooks/usePipelineEventStream' +import useSpaceSSE from 'hooks/useSpaceSSE' import { ExecutionState } from 'components/ExecutionStatus/ExecutionStatus' import noExecutionImage from '../RepositoriesListing/no-repo.svg' import css from './Execution.module.scss' @@ -45,14 +45,14 @@ const Execution = () => { } }, [execution]) - usePipelineEventStream({ + useSpaceSSE({ space, - onEvent: (data: any) => { + events: ['execution_updated', 'execution_completed'], + onEvent: data => { if ( - (data.type === 'execution_updated' || data.type === 'execution_completed') && - data.data?.repo_id === execution?.repo_id && - data.data?.pipeline_id === execution?.pipeline_id && - data.data?.number === execution?.number + data?.repo_id === execution?.repo_id && + data?.pipeline_id === execution?.pipeline_id && + data?.number === execution?.number ) { //TODO - revisit full refresh - can I use the message to update the execution? executionRefetch() @@ -65,7 +65,7 @@ const Execution = () => { { const pageBrowser = useQueryParams() const pageInit = pageBrowser.page ? 
parseInt(pageBrowser.page) : 1 const [page, setPage] = usePageIndex(pageInit) - const { showError, showSuccess } = useToaster() const { repoMetadata, error, loading, refetch, space } = useGetRepositoryMetadata() @@ -68,13 +67,14 @@ const ExecutionList = () => { } }, [executions]) - usePipelineEventStream({ + useSpaceSSE({ space, - onEvent: (data: any) => { + events: ['execution_updated', 'execution_completed'], + onEvent: data => { // ideally this would include number - so we only check for executions on the page - but what if new executions are kicked off? - could check for ids that are higher than the lowest id on the page? if ( executions?.some( - execution => execution.repo_id === data.data?.repo_id && execution.pipeline_id === data.data?.pipeline_id + execution => execution.repo_id === data?.repo_id && execution.pipeline_id === data?.pipeline_id ) ) { //TODO - revisit full refresh - can I use the message to update the execution? @@ -83,19 +83,11 @@ const ExecutionList = () => { } }) - const { mutate, loading: mutateLoading } = useMutate({ - verb: 'POST', - path: `/api/v1/repos/${repoMetadata?.path}/+/pipelines/${pipeline}/executions` - }) + const { openModal: openRunPipelineModal } = useRunPipelineModal() const handleClick = async () => { - try { - //TODO - this should NOT be hardcoded to master branch - need a modal to insert branch - but useful for testing until then - await mutate({ branch: 'master' }) - showSuccess('Build started') - executionsRefetch() - } catch { - showError('Failed to start build') + if (repoMetadata && pipeline) { + openRunPipelineModal({ repoMetadata, pipeline }) } } @@ -104,7 +96,6 @@ const ExecutionList = () => { text={getString('executions.newExecutionButton')} variation={ButtonVariation.PRIMARY} icon="play-outline" - disabled={mutateLoading} onClick={handleClick}> ) diff --git a/web/src/pages/Home/Home.module.scss b/web/src/pages/Home/Home.module.scss index 4594e5b56..6d322fc66 100644 --- a/web/src/pages/Home/Home.module.scss +++ 
b/web/src/pages/Home/Home.module.scss @@ -10,3 +10,28 @@ height: 100vh; padding-top: 26% !important; } + +.buttonContainer { + :global { + .bp3-button-text { + font-size: 16px !important; + } + } +} + +.bigButton { + :global { + .SplitButton--splitButton { + > .bp3-button { + .bp3-button-text { + font-size: 16px !important; + --font-size: 16px !important; + } + } + } + .bp3-icon-chevron-down { + left: -7px; + position: relative; + } + } +} diff --git a/web/src/pages/Home/Home.module.scss.d.ts b/web/src/pages/Home/Home.module.scss.d.ts index e948d155b..a0d0175ee 100644 --- a/web/src/pages/Home/Home.module.scss.d.ts +++ b/web/src/pages/Home/Home.module.scss.d.ts @@ -1,5 +1,7 @@ /* eslint-disable */ // This is an auto-generated file +export declare const bigButton: string +export declare const buttonContainer: string export declare const container: string export declare const main: string export declare const spaceContainer: string diff --git a/web/src/pages/Home/Home.tsx b/web/src/pages/Home/Home.tsx index c9f577cc2..1f0b25326 100644 --- a/web/src/pages/Home/Home.tsx +++ b/web/src/pages/Home/Home.tsx @@ -1,5 +1,5 @@ import React from 'react' -import { ButtonVariation, Container, Layout, PageBody, Text } from '@harnessio/uicore' +import { ButtonVariation, ButtonSize, Container, Layout, PageBody, Text } from '@harnessio/uicore' import { FontVariation } from '@harnessio/design-system' import { noop } from 'lodash-es' import { useHistory } from 'react-router-dom' @@ -21,17 +21,20 @@ export default function Home() { const NewSpaceButton = ( { history.push(routes.toCODERepositories({ space: spaceName })) }} + onSubmit={data => { + history.push(routes.toCODERepositories({ space: data.path as string })) + }} /> ) return ( @@ -51,7 +54,7 @@ export default function Home() { })} {getString('homepage.firstStep')} - + {NewSpaceButton} diff --git a/web/src/pages/PipelineList/PipelineList.tsx b/web/src/pages/PipelineList/PipelineList.tsx index 37e318f4b..4e970a4e6 100644 --- 
a/web/src/pages/PipelineList/PipelineList.tsx +++ b/web/src/pages/PipelineList/PipelineList.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useMemo, useState } from 'react' -import { Classes, Menu, MenuItem, Popover, Position } from '@blueprintjs/core' +import { Classes, Intent, Menu, MenuItem, Popover, Position } from '@blueprintjs/core' import { Avatar, Button, @@ -8,18 +8,20 @@ import { FlexExpander, Layout, PageBody, + StringSubstitute, TableV2 as Table, Text, - Utils + Utils, + useToaster } from '@harnessio/uicore' import { Color } from '@harnessio/design-system' import cx from 'classnames' import type { CellProps, Column } from 'react-table' import Keywords from 'react-keywords' import { Link, useHistory } from 'react-router-dom' -import { useGet } from 'restful-react' +import { useGet, useMutate } from 'restful-react' import { Calendar, Timer, GitFork } from 'iconoir-react' -import { useStrings } from 'framework/strings' +import { String, useStrings } from 'framework/strings' import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' import { NoResultCard } from 'components/NoResultCard/NoResultCard' @@ -32,11 +34,12 @@ import { useAppContext } from 'AppContext' import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' import { ExecutionStatus, ExecutionState } from 'components/ExecutionStatus/ExecutionStatus' -import { getStatus } from 'utils/PipelineUtils' +import { getStatus } from 'utils/ExecutionUtils' import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' import useNewPipelineModal from 'components/NewPipelineModal/NewPipelineModal' import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import usePipelineEventStream from 'hooks/usePipelineEventStream' +import useSpaceSSE from 'hooks/useSpaceSSE' +import { 
useConfirmAct } from 'hooks/useConfirmAction' import noPipelineImage from '../RepositoriesListing/no-repo.svg' import css from './PipelineList.module.scss' @@ -74,13 +77,12 @@ const PipelineList = () => { } }, [pipelines]) - usePipelineEventStream({ + useSpaceSSE({ space, - onEvent: (data: any) => { + events: ['execution_updated', 'execution_completed'], + onEvent: data => { // should I include pipeline id here? what if a new pipeline is created? could check for ids that are higher than the lowest id on the page? - if ( - pipelines?.some(pipeline => pipeline.repo_id === data.data?.repo_id && pipeline.id === data.data?.pipeline_id) - ) { + if (pipelines?.some(pipeline => pipeline.repo_id === data?.repo_id && pipeline.id === data?.pipeline_id)) { //TODO - revisit full refresh - can I use the message to update the execution? pipelinesRefetch() } @@ -206,6 +208,15 @@ const PipelineList = () => { const [menuOpen, setMenuOpen] = useState(false) const record = row.original const { uid } = record + const repoPath = useMemo(() => repoMetadata?.path || '', [repoMetadata]) + + const confirmDeletePipeline = useConfirmAct() + const { showSuccess, showError } = useToaster() + const { mutate: deletePipeline } = useMutate({ + verb: 'DELETE', + path: `/api/v1/repos/${repoPath}/+/pipelines/${uid}` + }) + return ( { { + e.stopPropagation() + history.push(routes.toCODEPipelineEdit({ repoPath, pipeline: uid as string })) + }} + /> + { + e.stopPropagation() + confirmDeletePipeline({ + title: getString('pipelines.deletePipelineButton'), + confirmText: getString('delete'), + intent: Intent.DANGER, + message: ( + + ), + action: async () => { + deletePipeline({}) + .then(() => { + showSuccess( + , + 5000 + ) + pipelinesRefetch() + }) + .catch((pipelineDeleteErr: unknown) => { + showError(getErrorMessage(pipelineDeleteErr), 0, 'pipelines.deletePipelineError') + }) + } + }) + }} + /> + { e.stopPropagation() history.push( - routes.toCODEPipelineEdit({ repoPath: repoMetadata?.path || '', 
pipeline: uid as string }) + routes.toCODEPipelineSettings({ repoPath: repoMetadata?.path || '', pipeline: uid as string }) ) }} /> @@ -240,7 +296,7 @@ const PipelineList = () => { } } ], - [getString, repoMetadata?.path, routes, searchTerm] + [getString, history, repoMetadata?.path, routes, searchTerm] ) return ( @@ -252,7 +308,7 @@ const PipelineList = () => { /> pipelines?.length === 0 && searchTerm === undefined, diff --git a/web/src/pages/PullRequest/Conversation/Conversation.tsx b/web/src/pages/PullRequest/Conversation/Conversation.tsx index db16b7fad..28ea82341 100644 --- a/web/src/pages/PullRequest/Conversation/Conversation.tsx +++ b/web/src/pages/PullRequest/Conversation/Conversation.tsx @@ -113,7 +113,7 @@ export const Conversation: React.FC = ({ const allCommentBlock = blocks.filter(_activities => !isSystemComment(_activities)) const userCommentsOnly = allCommentBlock.filter(_activities => { const userCommentReply = _activities.filter( - authorIsUser => authorIsUser.payload?.author?.uid === currentUser.uid + authorIsUser => currentUser?.uid && authorIsUser.payload?.author?.uid === currentUser?.uid ) return userCommentReply.length !== 0 }) @@ -122,7 +122,7 @@ export const Conversation: React.FC = ({ } return blocks - }, [activities, dateOrderSort, activityFilter, currentUser.uid]) + }, [activities, dateOrderSort, activityFilter, currentUser?.uid]) const path = useMemo( () => `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}/comments`, [repoMetadata.path, pullRequestMetadata.number] diff --git a/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss b/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss index e84c78c0a..6a9f98e37 100644 --- a/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss +++ b/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss @@ -39,13 +39,26 @@ .row { height: 80px; - box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16); 
overflow: hidden; &.noDesc > div { height: 44px; } } + .rowDisable { + background-color: var(--grey-50) !important; + cursor: auto !important; + .repoName { + color: var(--grey-400) !important; + } + .desc { + color: var(--grey-300) !important; + } + } + .rowDisable:hover { + box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16) !important; + border: unset !important; + } } .nameContainer { @@ -102,3 +115,7 @@ padding-top: var(--spacing-xsmall) !important; } } + +.progressBar { + opacity: 0.7; +} diff --git a/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss.d.ts b/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss.d.ts index 14776345b..0d14bc1eb 100644 --- a/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss.d.ts +++ b/web/src/pages/RepositoriesListing/RepositoriesListing.module.scss.d.ts @@ -8,8 +8,10 @@ export declare const name: string export declare const nameContainer: string export declare const noDesc: string export declare const pinned: string +export declare const progressBar: string export declare const repoName: string export declare const repoScope: string export declare const row: string +export declare const rowDisable: string export declare const table: string export declare const withError: string diff --git a/web/src/pages/RepositoriesListing/RepositoriesListing.tsx b/web/src/pages/RepositoriesListing/RepositoriesListing.tsx index 31269b3d4..c2c2cb1ed 100644 --- a/web/src/pages/RepositoriesListing/RepositoriesListing.tsx +++ b/web/src/pages/RepositoriesListing/RepositoriesListing.tsx @@ -9,6 +9,7 @@ import { TableV2 as Table, Text } from '@harnessio/uicore' +import { ProgressBar, Intent } from '@blueprintjs/core' import { Icon } from '@harnessio/icons' import { Color } from '@harnessio/design-system' import type { CellProps, Column } from 'react-table' @@ -31,6 +32,10 @@ import { NoResultCard } from 'components/NoResultCard/NoResultCard' import { 
ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' import noRepoImage from './no-repo.svg' import css from './RepositoriesListing.module.scss' +import useSpaceSSE from 'hooks/useSpaceSSE' +interface TypesRepoExtended extends TypesRepository { + importing?: boolean +} export default function RepositoriesListing() { const { getString } = useStrings() @@ -56,6 +61,17 @@ export default function RepositoriesListing() { queryParams: { page, limit: LIST_FETCHING_LIMIT, query: searchTerm }, debounce: 500 }) + useSpaceSSE({ + space, + events: ['repository_import_completed'], + onEvent: data => { + // should I include pipeline id here? what if a new pipeline is created? coould check for ids that are higher than the lowest id on the page? + if (repositories?.some(repository => repository.id === data?.id && repository.parent_id === data?.parent_id)) { + //TODO - revisit full refresh - can I use the message to update the execution? + refetch() + } + } + }) useEffect(() => { setSearchTerm(undefined) @@ -64,12 +80,13 @@ export default function RepositoriesListing() { } }, [space, setPage]) // eslint-disable-line react-hooks/exhaustive-deps - const columns: Column[] = useMemo( + const columns: Column[] = useMemo( () => [ { Header: getString('repos.name'), width: 'calc(100% - 180px)', - Cell: ({ row }: CellProps) => { + + Cell: ({ row }: CellProps) => { const record = row.original return ( @@ -78,10 +95,16 @@ export default function RepositoriesListing() { {record.uid} - {record.description && ( + {record.importing ? ( - {record.description} + {getString('importProgress')} + ) : ( + record.description && ( + + {record.description} + + ) )} @@ -92,8 +115,12 @@ export default function RepositoriesListing() { { Header: getString('repos.updated'), width: '180px', - Cell: ({ row }: CellProps) => { - return ( + Cell: ({ row }: CellProps) => { + return row.original.importing ? 
( + + + + ) : ( {formatDate(row.original.updated as number)} @@ -107,6 +134,7 @@ export default function RepositoriesListing() { ], [nameTextWidth, getString, searchTerm] ) + const onResize = useCallback(() => { if (rowContainerRef.current) { setNameTextWidth((rowContainerRef.current.closest('div[role="cell"]') as HTMLDivElement)?.offsetWidth - 100) @@ -118,8 +146,13 @@ export default function RepositoriesListing() { modalTitle={getString('createARepo')} text={getString('newRepo')} variation={ButtonVariation.PRIMARY} - icon="plus" - onSubmit={repoInfo => history.push(routes.toCODERepository({ repoPath: repoInfo.path as string }))} + onSubmit={repoInfo => { + if (repoInfo.importing) { + refetch() + } else { + history.push(routes.toCODERepository({ repoPath: repoInfo.path as string })) + } + }} /> ) @@ -155,12 +188,18 @@ export default function RepositoriesListing() { {!!repositories?.length && ( - + className={css.table} columns={columns} data={repositories || []} - onRowClick={repoInfo => history.push(routes.toCODERepository({ repoPath: repoInfo.path as string }))} - getRowClassName={row => cx(css.row, !row.original.description && css.noDesc)} + onRowClick={repoInfo => { + return repoInfo.importing + ? undefined + : history.push(routes.toCODERepository({ repoPath: repoInfo.path as string })) + }} + getRowClassName={row => + cx(css.row, !row.original.description && css.noDesc, row.original.importing && css.rowDisable) + } /> )} diff --git a/web/src/pages/Search/Search.tsx b/web/src/pages/Search/Search.tsx index c2c4cde52..d100b2f56 100644 --- a/web/src/pages/Search/Search.tsx +++ b/web/src/pages/Search/Search.tsx @@ -90,12 +90,15 @@ export default function Search() { error: resourceError = null, loading: resourceLoading } = useGetResourceContent({ repoMetadata, gitRef, resourcePath, includeCommit: false, lazy: !resourcePath }) - const fileContent = useMemo( + const fileContent: string = useMemo( () => resourceContent?.path === resourcePath ? 
decodeGitContent((resourceContent?.content as RepoFileContent)?.data) + : resourceError + ? getString('failedToFetchFileContent') : '', - [resourceContent?.content, resourceContent?.path, resourcePath] + + [resourceContent?.content, resourceContent?.path, resourcePath, resourceError, getString] ) // eslint-disable-next-line react-hooks/exhaustive-deps @@ -136,6 +139,14 @@ export default function Search() { } }, [repoMetadata?.path]) // eslint-disable-line react-hooks/exhaustive-deps + useEffect(() => { + if (fileContent && fileContent !== viewRef?.current?.state.doc.toString()) { + viewRef?.current?.dispatch({ + changes: { from: 0, to: viewRef?.current?.state.doc.length, insert: fileContent } + }) + } + }, [fileContent]) + useShowRequestError(resourceError) return ( @@ -260,7 +271,7 @@ const SearchResults: React.FC = ({ data, onSelect }) => { onSelect( item.file_name, item.file_path, - (item.content || []).join('\n').replace(/^\n/g, '').trim(), + (item.lines || []).join('\n').replace(/^\n/g, '').trim(), range(item.start_line, item.end_line) ) } @@ -282,7 +293,7 @@ const SearchResults: React.FC = ({ data, onSelect }) => { onSelect( item.file_name, item.file_path, - (item.content || []).join('\n').replace(/^\n/g, '').trim(), + (item.lines || []).join('\n').replace(/^\n/g, '').trim(), range(item.start_line, item.end_line) ) }}> @@ -306,7 +317,7 @@ const SearchResults: React.FC = ({ data, onSelect }) => { diff --git a/web/src/pages/SecretList/SecretList.tsx b/web/src/pages/SecretList/SecretList.tsx index a0b162639..aca84b286 100644 --- a/web/src/pages/SecretList/SecretList.tsx +++ b/web/src/pages/SecretList/SecretList.tsx @@ -29,6 +29,7 @@ import { ResourceListingPagination } from 'components/ResourceListingPagination/ import { NewSecretModalButton } from 'components/NewSecretModalButton/NewSecretModalButton' import { useConfirmAct } from 'hooks/useConfirmAction' import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' +import 
useUpdateSecretModal from 'components/UpdateSecretModal/UpdateSecretModal' import noSecretsImage from '../RepositoriesListing/no-repo.svg' import css from './SecretList.module.scss' @@ -61,6 +62,8 @@ const SecretList = () => { onSuccess={() => refetch()}> ) + const { openModal: openUpdateSecretModal } = useUpdateSecretModal() + const columns: Column[] = useMemo( () => [ { @@ -113,6 +116,11 @@ const SecretList = () => { isDark width="100px" items={[ + { + text: getString('edit'), + isDanger: true, + onClick: () => openUpdateSecretModal({ secretToUpdate: row.original, openSecretUpdate: refetch }) + }, { text: getString('delete'), isDanger: true, diff --git a/web/src/utils/PipelineUtils.ts b/web/src/utils/ExecutionUtils.ts similarity index 96% rename from web/src/utils/PipelineUtils.ts rename to web/src/utils/ExecutionUtils.ts index e7cfae3c3..c5b55e35b 100644 --- a/web/src/utils/PipelineUtils.ts +++ b/web/src/utils/ExecutionUtils.ts @@ -4,7 +4,7 @@ export const getStatus = (status: string | undefined): ExecutionState => { switch (status) { case 'success': return ExecutionState.SUCCESS - case 'failed': + case 'failure': return ExecutionState.FAILURE case 'running': return ExecutionState.RUNNING diff --git a/web/src/utils/GitUtils.ts b/web/src/utils/GitUtils.ts index 1e9c418f4..9be00976e 100644 --- a/web/src/utils/GitUtils.ts +++ b/web/src/utils/GitUtils.ts @@ -21,6 +21,54 @@ export interface GitInfoProps { commits: TypesCommit[] pullRequestMetadata: TypesPullReq } +export interface RepoFormData { + name: string + description: string + license: string + defaultBranch: string + gitignore: string + addReadme: boolean + isPublic: RepoVisibility +} +export interface ImportFormData { + repoUrl: string + username: string + password: string + name: string + description: string + isPublic: RepoVisibility +} + +export interface ExportFormData { + accountId: string + token: string + organization: string + name: string +} + +export interface ImportSpaceFormData { + gitProvider: 
string + username: string + password: string + name: string + description: string + organization: string +} + +export enum RepoVisibility { + PUBLIC = 'public', + PRIVATE = 'private' +} + +export enum RepoCreationType { + IMPORT = 'import', + CREATE = 'create' +} + +export enum SpaceCreationType { + IMPORT = 'import', + CREATE = 'create' +} export enum GitContentType { FILE = 'file', @@ -101,6 +149,11 @@ export const CodeIcon = { ChecksSuccess: 'success-tick' as IconName } +export enum Organization { + GITHUB = 'Github', + GITLAB = 'Gitlab' +} + export const REFS_TAGS_PREFIX = 'refs/tags/' // eslint-disable-next-line no-control-regex @@ -168,3 +221,17 @@ export const decodeGitContent = (content = '') => { } return '' } + +export const parseUrl = (url: string) => { + const pattern = /^(https?:\/\/(?:www\.)?(github|gitlab)\.com\/([^/]+\/[^/]+))/ + const match = url.match(pattern) + + if (match) { + const provider = match[2] + const fullRepo = match[3] + const repoName = match[3].split('/')[1] + return { provider, fullRepo, repoName } + } else { + return null + } +}