Merge remote-tracking branch 'origin' into abhinav/CODE-853

Abhinav Singh 2023-09-13 17:31:45 -07:00
commit a309b44f0e
132 changed files with 4176 additions and 1042 deletions

.npmrc (3 lines deleted)

@ -1,3 +0,0 @@
@harness:registry=https://npm.pkg.github.com
//npm.pkg.github.com/:_authToken=${GITHUB_ACCESS_TOKEN}
always-auth=true


@ -14,7 +14,6 @@ ARG GITHUB_ACCESS_TOKEN
# RUN npm ci --omit=dev
COPY ./web .
COPY .npmrc /root/.npmrc
RUN yarn && yarn build && yarn cache clean
@ -70,18 +69,21 @@ RUN apk --update add ca-certificates
# ---------------------------------------------------------#
FROM alpine/git:2.36.3 as final
RUN adduser -u 1001 -D -h /app iamuser
# setup app dir and its content
WORKDIR /app
RUN chown -R 1001:1001 /app
COPY --from=builder --chown=1001:1001 --chmod=700 /app/gitness /app/gitness
VOLUME /data
ENV XDG_CACHE_HOME /data
ENV GITRPC_SERVER_GIT_ROOT /data
ENV GITNESS_DATABASE_DRIVER sqlite3
ENV GITNESS_DATABASE_DATASOURCE /data/database.sqlite
ENV GITNESS_METRIC_ENABLED=true
ENV GITNESS_METRIC_ENDPOINT=https://stats.drone.ci/api/v1/gitness
COPY --from=builder /app/gitness /app/gitness
COPY --from=cert-image /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
EXPOSE 3000
EXPOSE 3001
USER 1001
ENTRYPOINT [ "/app/gitness", "server" ]


@ -41,8 +41,8 @@ import (
"github.com/harness/gitness/internal/bootstrap"
gitevents "github.com/harness/gitness/internal/events/git"
pullreqevents "github.com/harness/gitness/internal/events/pullreq"
"github.com/harness/gitness/internal/pipeline/canceler"
"github.com/harness/gitness/internal/pipeline/commit"
eventsstream "github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/internal/pipeline/manager"
"github.com/harness/gitness/internal/pipeline/runner"
@ -57,6 +57,7 @@ import (
pullreqservice "github.com/harness/gitness/internal/services/pullreq"
"github.com/harness/gitness/internal/services/trigger"
"github.com/harness/gitness/internal/services/webhook"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/cache"
"github.com/harness/gitness/internal/store/database"
@ -130,12 +131,13 @@ func initSystem(ctx context.Context, config *types.Config) (*cliserver.System, e
triggerer.WireSet,
file.WireSet,
runner.WireSet,
eventsstream.WireSet,
sse.WireSet,
scheduler.WireSet,
commit.WireSet,
controllertrigger.WireSet,
plugin.WireSet,
importer.WireSet,
canceler.WireSet,
exporter.WireSet,
)
return &cliserver.System{}, nil


@ -10,7 +10,7 @@ import (
"context"
"github.com/harness/gitness/cli/server"
"github.com/harness/gitness/encrypt"
events2 "github.com/harness/gitness/events"
"github.com/harness/gitness/events"
"github.com/harness/gitness/gitrpc"
server3 "github.com/harness/gitness/gitrpc/server"
"github.com/harness/gitness/gitrpc/server/cron"
@ -36,10 +36,10 @@ import (
"github.com/harness/gitness/internal/auth/authn"
"github.com/harness/gitness/internal/auth/authz"
"github.com/harness/gitness/internal/bootstrap"
events4 "github.com/harness/gitness/internal/events/git"
events3 "github.com/harness/gitness/internal/events/pullreq"
events3 "github.com/harness/gitness/internal/events/git"
events2 "github.com/harness/gitness/internal/events/pullreq"
"github.com/harness/gitness/internal/pipeline/canceler"
"github.com/harness/gitness/internal/pipeline/commit"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/internal/pipeline/manager"
"github.com/harness/gitness/internal/pipeline/runner"
@ -55,6 +55,7 @@ import (
"github.com/harness/gitness/internal/services/pullreq"
trigger2 "github.com/harness/gitness/internal/services/trigger"
"github.com/harness/gitness/internal/services/webhook"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/store/cache"
"github.com/harness/gitness/internal/store/database"
@ -125,26 +126,28 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro
if err != nil {
return nil, err
}
repository, err := importer.ProvideRepoImporter(config, provider, gitrpcInterface, repoStore, encrypter, jobScheduler, executor)
streamer := sse.ProvideEventsStreaming(pubSub)
repository, err := importer.ProvideRepoImporter(config, provider, gitrpcInterface, repoStore, encrypter, jobScheduler, executor, streamer)
if err != nil {
return nil, err
}
repoController := repo.ProvideController(config, db, provider, pathUID, authorizer, pathStore, repoStore, spaceStore, pipelineStore, principalStore, gitrpcInterface, repository)
executionStore := database.ProvideExecutionStore(db)
commitService := commit.ProvideCommitService(gitrpcInterface)
stageStore := database.ProvideStageStore(db)
fileService := file.ProvideFileService(gitrpcInterface)
schedulerScheduler, err := scheduler.ProvideScheduler(stageStore, mutexManager)
if err != nil {
return nil, err
}
triggererTriggerer := triggerer.ProvideTriggerer(executionStore, stageStore, db, pipelineStore, fileService, schedulerScheduler, repoStore)
executionController := execution.ProvideController(db, authorizer, executionStore, commitService, triggererTriggerer, repoStore, stageStore, pipelineStore)
stepStore := database.ProvideStepStore(db)
cancelerCanceler := canceler.ProvideCanceler(executionStore, streamer, repoStore, schedulerScheduler, stageStore, stepStore)
commitService := commit.ProvideCommitService(gitrpcInterface)
checkStore := database.ProvideCheckStore(db, principalInfoCache)
fileService := file.ProvideFileService(gitrpcInterface)
triggererTriggerer := triggerer.ProvideTriggerer(executionStore, checkStore, stageStore, db, pipelineStore, fileService, schedulerScheduler, repoStore)
executionController := execution.ProvideController(db, authorizer, executionStore, cancelerCanceler, commitService, triggererTriggerer, repoStore, stageStore, pipelineStore)
logStore := logs.ProvideLogStore(db, config)
logStream := livelog.ProvideLogStream(config)
logsController := logs2.ProvideController(db, authorizer, executionStore, repoStore, pipelineStore, stageStore, stepStore, logStore, logStream)
eventsStreamer := events.ProvideEventsStreaming(pubSub)
secretStore := database.ProvideSecretStore(db)
connectorStore := database.ProvideConnectorStore(db)
templateStore := database.ProvideTemplateStore(db)
@ -152,7 +155,7 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro
if err != nil {
return nil, err
}
spaceController := space.ProvideController(db, provider, eventsStreamer, pathUID, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore, repository, exporterRepository)
spaceController := space.ProvideController(db, provider, streamer, pathUID, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore, repository, exporterRepository)
triggerStore := database.ProvideTriggerStore(db)
pipelineController := pipeline.ProvideController(db, pathUID, pathStore, repoStore, triggerStore, authorizer, pipelineStore)
secretController := secret.ProvideController(db, pathUID, pathStore, encrypter, secretStore, authorizer, spaceStore)
@ -170,20 +173,20 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro
if err != nil {
return nil, err
}
eventsSystem, err := events2.ProvideSystem(eventsConfig, universalClient)
eventsSystem, err := events.ProvideSystem(eventsConfig, universalClient)
if err != nil {
return nil, err
}
reporter, err := events3.ProvideReporter(eventsSystem)
reporter, err := events2.ProvideReporter(eventsSystem)
if err != nil {
return nil, err
}
migrator := codecomments.ProvideMigrator(gitrpcInterface)
readerFactory, err := events4.ProvideReaderFactory(eventsSystem)
readerFactory, err := events3.ProvideReaderFactory(eventsSystem)
if err != nil {
return nil, err
}
eventsReaderFactory, err := events3.ProvideReaderFactory(eventsSystem)
eventsReaderFactory, err := events2.ProvideReaderFactory(eventsSystem)
if err != nil {
return nil, err
}
@ -202,14 +205,13 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro
return nil, err
}
webhookController := webhook2.ProvideController(webhookConfig, db, authorizer, webhookStore, webhookExecutionStore, repoStore, webhookService)
eventsReporter, err := events4.ProvideReporter(eventsSystem)
eventsReporter, err := events3.ProvideReporter(eventsSystem)
if err != nil {
return nil, err
}
githookController := githook.ProvideController(db, authorizer, principalStore, repoStore, eventsReporter)
serviceaccountController := serviceaccount.NewController(principalUID, authorizer, principalStore, spaceStore, repoStore, tokenStore)
principalController := principal.ProvideController(principalStore)
checkStore := database.ProvideCheckStore(db, principalInfoCache)
checkController := check2.ProvideController(db, authorizer, repoStore, checkStore, gitrpcInterface)
systemController := system.NewController(principalStore, config)
apiHandler := router.ProvideAPIHandler(config, authenticator, repoController, executionController, logsController, spaceController, pipelineController, secretController, triggerController, connectorController, templateController, pluginController, pullreqController, webhookController, githookController, serviceaccountController, controller, principalController, checkController, systemController)
@ -217,7 +219,7 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro
webHandler := router.ProvideWebHandler(config)
routerRouter := router.ProvideRouter(config, apiHandler, gitHandler, webHandler)
serverServer := server2.ProvideServer(config, routerRouter)
executionManager := manager.ProvideExecutionManager(config, executionStore, pipelineStore, provider, eventsStreamer, fileService, logStore, logStream, repoStore, schedulerScheduler, secretStore, stageStore, stepStore, principalStore)
executionManager := manager.ProvideExecutionManager(config, executionStore, pipelineStore, provider, streamer, fileService, logStore, logStream, checkStore, repoStore, schedulerScheduler, secretStore, stageStore, stepStore, principalStore)
client := manager.ProvideExecutionClient(executionManager, config)
runtimeRunner, err := runner.ProvideExecutionRunner(config, client, executionManager)
if err != nil {


@ -74,6 +74,10 @@ func (in *ReportInput) Validate() error {
}
in.Payload.Data = payloadDataJSON
case enum.CheckPayloadKindPipeline:
return usererror.BadRequest("Kind cannot be pipeline for external checks")
}
return nil


@ -14,23 +14,18 @@ import (
"github.com/harness/gitness/types/enum"
)
type UpdateInput struct {
Status string `json:"status"`
}
func (c *Controller) Update(
func (c *Controller) Cancel(
ctx context.Context,
session *auth.Session,
repoRef string,
pipelineUID string,
executionNum int64,
in *UpdateInput) (*types.Execution, error) {
) (*types.Execution, error) {
repo, err := c.repoStore.FindByRef(ctx, repoRef)
if err != nil {
return nil, fmt.Errorf("failed to find repo by ref: %w", err)
}
err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit)
err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineExecute)
if err != nil {
return nil, fmt.Errorf("failed to authorize: %w", err)
}
@ -42,16 +37,13 @@ func (c *Controller) Update(
execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum)
if err != nil {
return nil, fmt.Errorf("failed to find execution: %w", err)
return nil, fmt.Errorf("failed to find execution %d: %w", executionNum, err)
}
return c.executionStore.UpdateOptLock(ctx,
execution, func(original *types.Execution) error {
// update values only if provided
if in.Status != "" {
original.Status = in.Status
}
err = c.canceler.Cancel(ctx, repo, execution)
if err != nil {
return nil, fmt.Errorf("unable to cancel execution: %w", err)
}
return nil
})
return execution, nil
}


@ -6,6 +6,7 @@ package execution
import (
"github.com/harness/gitness/internal/auth/authz"
"github.com/harness/gitness/internal/pipeline/canceler"
"github.com/harness/gitness/internal/pipeline/commit"
"github.com/harness/gitness/internal/pipeline/triggerer"
"github.com/harness/gitness/internal/store"
@ -17,6 +18,7 @@ type Controller struct {
db *sqlx.DB
authorizer authz.Authorizer
executionStore store.ExecutionStore
canceler canceler.Canceler
commitService commit.CommitService
triggerer triggerer.Triggerer
repoStore store.RepoStore
@ -28,6 +30,7 @@ func NewController(
db *sqlx.DB,
authorizer authz.Authorizer,
executionStore store.ExecutionStore,
canceler canceler.Canceler,
commitService commit.CommitService,
triggerer triggerer.Triggerer,
repoStore store.RepoStore,
@ -38,6 +41,7 @@ func NewController(
db: db,
authorizer: authorizer,
executionStore: executionStore,
canceler: canceler,
commitService: commitService,
triggerer: triggerer,
repoStore: repoStore,


@ -60,6 +60,7 @@ func (c *Controller) Create(
hook := &triggerer.Hook{
Trigger: session.Principal.UID, // who/what triggered the build, different from commit author
AuthorLogin: commit.Author.Identity.Name,
TriggeredBy: session.Principal.ID,
AuthorName: commit.Author.Identity.Name,
AuthorEmail: commit.Author.Identity.Email,
Ref: ref,


@ -6,6 +6,7 @@ package execution
import (
"github.com/harness/gitness/internal/auth/authz"
"github.com/harness/gitness/internal/pipeline/canceler"
"github.com/harness/gitness/internal/pipeline/commit"
"github.com/harness/gitness/internal/pipeline/triggerer"
"github.com/harness/gitness/internal/store"
@ -22,12 +23,13 @@ var WireSet = wire.NewSet(
func ProvideController(db *sqlx.DB,
authorizer authz.Authorizer,
executionStore store.ExecutionStore,
canceler canceler.Canceler,
commitService commit.CommitService,
triggerer triggerer.Triggerer,
repoStore store.RepoStore,
stageStore store.StageStore,
pipelineStore store.PipelineStore,
) *Controller {
return NewController(db, authorizer, executionStore, commitService,
return NewController(db, authorizer, executionStore, canceler, commitService,
triggerer, repoStore, stageStore, pipelineStore)
}


@ -59,6 +59,7 @@ func (c *Controller) Create(
Description: in.Description,
RepoID: repo.ID,
UID: in.UID,
CreatedBy: session.Principal.ID,
Seq: 0,
DefaultBranch: in.DefaultBranch,
ConfigPath: in.ConfigPath,
@ -84,8 +85,8 @@ func (c *Controller) Create(
UID: "default",
Actions: []enum.TriggerAction{enum.TriggerActionPullReqCreated,
enum.TriggerActionPullReqReopened, enum.TriggerActionPullReqBranchUpdated},
Enabled: true,
Version: 0,
Disabled: false,
Version: 0,
}
err = c.triggerStore.Create(ctx, trigger)
if err != nil {


@ -108,7 +108,7 @@ func (c *Controller) GetContent(ctx context.Context,
IncludeLatestCommit: includeLatestCommit,
})
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to read tree node: %w", err)
}
info, err := mapToContentInfo(treeNodeOutput.Node, treeNodeOutput.Commit, includeLatestCommit)


@ -40,10 +40,7 @@ func (c *Controller) Delete(ctx context.Context, session *auth.Session, repoRef
log.Ctx(ctx).Info().Msgf("Delete request received for repo %s , id: %d", repo.Path, repo.ID)
// TODO: uncomment when soft delete is implemented
// return c.DeleteNoAuth(ctx, session, repo)
return nil
return c.DeleteNoAuth(ctx, session, repo)
}
func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, repo *types.Repository) error {


@ -7,11 +7,11 @@ package repo
import (
"context"
"fmt"
"github.com/harness/gitness/internal/api/usererror"
"github.com/harness/gitness/internal/auth"
"github.com/harness/gitness/internal/paths"
"github.com/harness/gitness/internal/services/importer"
"github.com/harness/gitness/internal/services/job"
"github.com/harness/gitness/store/database/dbtx"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
@ -43,11 +43,6 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo
return nil, err
}
jobUID, err := job.UID()
if err != nil {
return nil, fmt.Errorf("error creating job UID: %w", err)
}
var repo *types.Repository
err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error {
// lock parent space path to ensure it doesn't get updated while we setup new repo
@ -57,7 +52,7 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo
}
pathToRepo := paths.Concatinate(spacePath.Value, in.UID)
repo = remoteRepository.ToRepo(parentSpace.ID, pathToRepo, in.UID, in.Description, jobUID, &session.Principal)
repo = remoteRepository.ToRepo(parentSpace.ID, pathToRepo, in.UID, in.Description, &session.Principal)
err = c.repoStore.Create(ctx, repo)
if err != nil {


@ -42,7 +42,7 @@ func (c *Controller) Raw(ctx context.Context,
IncludeLatestCommit: false,
})
if err != nil {
return nil, 0, err
return nil, 0, fmt.Errorf("failed to read tree node: %w", err)
}
// viewing Raw content is only supported for blob content


@ -51,6 +51,7 @@ func (c *Controller) Create(ctx context.Context, session *auth.Session, in *Crea
var secret *types.Secret
now := time.Now().UnixMilli()
secret = &types.Secret{
CreatedBy: session.Principal.ID,
Description: in.Description,
Data: in.Data,
SpaceID: parentSpace.ID,


@ -7,9 +7,9 @@ package space
import (
"github.com/harness/gitness/internal/api/controller/repo"
"github.com/harness/gitness/internal/auth/authz"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/services/exporter"
"github.com/harness/gitness/internal/services/importer"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/url"
"github.com/harness/gitness/types/check"
@ -20,7 +20,7 @@ import (
type Controller struct {
db *sqlx.DB
urlProvider *url.Provider
eventsStream events.EventsStreamer
sseStreamer sse.Streamer
uidCheck check.PathUID
authorizer authz.Authorizer
pathStore store.PathStore
@ -37,7 +37,7 @@ type Controller struct {
exporter *exporter.Repository
}
func NewController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.EventsStreamer,
func NewController(db *sqlx.DB, urlProvider *url.Provider, sseStreamer sse.Streamer,
uidCheck check.PathUID, authorizer authz.Authorizer,
pathStore store.PathStore, pipelineStore store.PipelineStore, secretStore store.SecretStore,
connectorStore store.ConnectorStore, templateStore store.TemplateStore, spaceStore store.SpaceStore,
@ -47,7 +47,7 @@ func NewController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.E
return &Controller{
db: db,
urlProvider: urlProvider,
eventsStream: eventsStream,
sseStreamer: sseStreamer,
uidCheck: uidCheck,
authorizer: authorizer,
pathStore: pathStore,


@ -13,8 +13,6 @@ import (
"github.com/harness/gitness/internal/auth"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/rs/zerolog/log"
)
// Delete deletes a space.
@ -26,13 +24,12 @@ func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef
if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceDelete, false); err != nil {
return err
}
// TODO: uncomment when soft delete is implemented
log.Ctx(ctx).Info().Msgf("Delete request received for space %s", space.Path)
// return c.DeleteNoAuth(ctx, session, space.ID)
return nil
return c.DeleteNoAuth(ctx, session, space.ID)
}
// DeleteNoAuth bypasses PermissionSpaceDelete, PermissionSpaceView, PermissionRepoView, and PermissionRepoDelete.
// DeleteNoAuth deletes the space - no authorization is verified.
// WARNING this is meant for internal calls only.
func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error {
filter := &types.SpaceFilter{
Page: 1,
@ -62,10 +59,25 @@ func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, sp
return nil
}
func (c *Controller) DeleteWithPathNoAuth(ctx context.Context, session *auth.Session, spacePath string) error {
space, err := c.spaceStore.FindByRef(ctx, spacePath)
if err != nil {
return err
// deleteRepositoriesNoAuth deletes all repositories in a space - no authorization is verified.
// WARNING this is meant for internal calls only.
func (c *Controller) deleteRepositoriesNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error {
filter := &types.RepoFilter{
Page: 1,
Size: int(math.MaxInt),
Query: "",
Order: enum.OrderAsc,
Sort: enum.RepoAttrNone,
}
return c.DeleteNoAuth(ctx, session, space.ID)
repos, _, err := c.ListRepositoriesNoAuth(ctx, spaceID, filter)
if err != nil {
return fmt.Errorf("failed to list space repositories: %w", err)
}
for _, repo := range repos {
err = c.repoCtrl.DeleteNoAuth(ctx, session, repo)
if err != nil {
return fmt.Errorf("failed to delete repository %d: %w", repo.ID, err)
}
}
return nil
}


@ -1,38 +0,0 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package space
import (
"context"
"fmt"
"math"
"github.com/harness/gitness/internal/auth"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
)
// deleteRepositoriesNoAuth does not check PermissionRepoView, and PermissionRepoDelete permissions.
// Call this through Delete(Space) api to make sure the caller has DeleteSpace permission.
func (c *Controller) deleteRepositoriesNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error {
filter := &types.RepoFilter{
Page: 1,
Size: int(math.MaxInt),
Query: "",
Order: enum.OrderAsc,
Sort: enum.RepoAttrNone,
}
repos, _, err := c.ListRepositoriesNoAuth(ctx, spaceID, filter)
if err != nil {
return fmt.Errorf("failed to list space repositories: %w", err)
}
for _, repo := range repos {
err = c.repoCtrl.DeleteNoAuth(ctx, session, repo)
if err != nil {
return fmt.Errorf("failed to delete repository %d: %w", repo.ID, err)
}
}
return nil
}


@ -39,16 +39,21 @@ func (c *Controller) Events(
return fmt.Errorf("failed to authorize stream: %w", err)
}
ctx, cancel := context.WithTimeout(ctx, tailMaxTime)
defer cancel()
ctx, ctxCancel := context.WithTimeout(ctx, tailMaxTime)
defer ctxCancel()
io.WriteString(w, ": ping\n\n")
w.Flush()
events, errc, consumer := c.eventsStream.Subscribe(ctx, space.ID)
defer c.eventsStream.Unsubscribe(ctx, consumer)
eventStream, errorStream, sseCancel := c.sseStreamer.Stream(ctx, space.ID)
defer func() {
uerr := sseCancel(ctx)
if uerr != nil {
log.Ctx(ctx).Warn().Err(uerr).Msgf("failed to cancel sse stream for space '%s'", space.Path)
}
}()
// could not get error channel
if errc == nil {
if errorStream == nil {
io.WriteString(w, "event: error\ndata: eof\n\n")
w.Flush()
return fmt.Errorf("could not get error channel")
@ -72,17 +77,20 @@ L:
case <-ctx.Done():
log.Debug().Msg("events: stream cancelled")
break L
case err := <-errc:
case err := <-errorStream:
log.Err(err).Msg("events: received error in the tail channel")
break L
case <-pingTimer.C:
// if time b/w messages takes longer, send a ping
io.WriteString(w, ": ping\n\n")
w.Flush()
case event := <-events:
case event := <-eventStream:
io.WriteString(w, fmt.Sprintf("event: %s\n", event.Type))
io.WriteString(w, "data: ")
enc.Encode(event)
io.WriteString(w, "\n\n")
enc.Encode(event.Data)
// NOTE: enc.Encode already terminates the data with a newline, so only add one more
// Source: https://cs.opensource.google/go/go/+/refs/tags/go1.21.1:src/encoding/json/stream.go;l=220
io.WriteString(w, "\n")
w.Flush()
}
}
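
A minimal, self-contained sketch (not part of this commit) of the server-sent event frame the handler above produces, illustrating why a single extra "\n" is enough after json.Encoder, which already appends one newline per Encode call. The event type string is only an example.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// writeSSE mirrors the handler above: an "event:" line, a "data:" line whose
// JSON payload already ends in a newline, then one extra newline to close the frame.
func writeSSE(eventType string, data any) error {
	if _, err := fmt.Fprintf(os.Stdout, "event: %s\n", eventType); err != nil {
		return err
	}
	if _, err := fmt.Fprint(os.Stdout, "data: "); err != nil {
		return err
	}
	if err := json.NewEncoder(os.Stdout).Encode(data); err != nil { // Encode appends "\n"
		return err
	}
	_, err := fmt.Fprint(os.Stdout, "\n") // so only one more newline is needed
	return err
}

func main() {
	// prints:
	// event: execution_updated
	// data: {"id":1}
	//
	_ = writeSSE("execution_updated", map[string]int{"id": 1})
}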


@ -16,7 +16,6 @@ import (
"github.com/harness/gitness/internal/bootstrap"
"github.com/harness/gitness/internal/paths"
"github.com/harness/gitness/internal/services/importer"
"github.com/harness/gitness/internal/services/job"
"github.com/harness/gitness/store/database/dbtx"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/check"
@ -131,16 +130,9 @@ func (c *Controller) Import(ctx context.Context, session *auth.Session, in *Impo
}
for i, remoteRepository := range remoteRepositories {
var jobUID string
jobUID, err = job.UID()
if err != nil {
return fmt.Errorf("error creating job UID: %w", err)
}
pathToRepo := paths.Concatinate(path.Value, remoteRepository.UID)
repo := remoteRepository.ToRepo(
space.ID, pathToRepo, remoteRepository.UID, "", jobUID, &session.Principal)
space.ID, pathToRepo, remoteRepository.UID, "", &session.Principal)
err = c.repoStore.Create(ctx, repo)
if err != nil {


@ -7,9 +7,9 @@ package space
import (
"github.com/harness/gitness/internal/api/controller/repo"
"github.com/harness/gitness/internal/auth/authz"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/services/exporter"
"github.com/harness/gitness/internal/services/importer"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/url"
"github.com/harness/gitness/types/check"
@ -23,14 +23,14 @@ var WireSet = wire.NewSet(
ProvideController,
)
func ProvideController(db *sqlx.DB, urlProvider *url.Provider, eventsStream events.EventsStreamer,
func ProvideController(db *sqlx.DB, urlProvider *url.Provider, sseStreamer sse.Streamer,
uidCheck check.PathUID, authorizer authz.Authorizer, pathStore store.PathStore,
pipelineStore store.PipelineStore, secretStore store.SecretStore,
connectorStore store.ConnectorStore, templateStore store.TemplateStore,
spaceStore store.SpaceStore, repoStore store.RepoStore, principalStore store.PrincipalStore,
repoCtrl *repo.Controller, membershipStore store.MembershipStore, importer *importer.Repository, exporter *exporter.Repository,
) *Controller {
return NewController(db, urlProvider, eventsStream, uidCheck, authorizer,
return NewController(db, urlProvider, sseStreamer, uidCheck, authorizer,
pathStore, pipelineStore, secretStore,
connectorStore, templateStore,
spaceStore, repoStore, principalStore,


@ -21,7 +21,7 @@ type CreateInput struct {
Description string `json:"description"`
UID string `json:"uid"`
Secret string `json:"secret"`
Enabled bool `json:"enabled"`
Disabled bool `json:"disabled"`
Actions []enum.TriggerAction `json:"actions"`
}
@ -56,7 +56,7 @@ func (c *Controller) Create(
now := time.Now().UnixMilli()
trigger := &types.Trigger{
Description: in.Description,
Enabled: in.Enabled,
Disabled: in.Disabled,
Secret: in.Secret,
CreatedBy: session.Principal.ID,
RepoID: repo.ID,


@ -21,7 +21,7 @@ type UpdateInput struct {
UID *string `json:"uid"`
Actions []enum.TriggerAction `json:"actions"`
Secret *string `json:"secret"`
Enabled *bool `json:"enabled"` // can be nil, so keeping it a pointer
Disabled *bool `json:"disabled"` // can be nil, so keeping it a pointer
}
func (c *Controller) Update(
@ -72,8 +72,8 @@ func (c *Controller) Update(
if in.Secret != nil {
original.Secret = *in.Secret
}
if in.Enabled != nil {
original.Enabled = *in.Enabled
if in.Disabled != nil {
original.Disabled = *in.Disabled
}
return nil


@ -1,11 +1,9 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package execution
import (
"encoding/json"
"net/http"
"github.com/harness/gitness/internal/api/controller/execution"
@ -13,40 +11,32 @@ import (
"github.com/harness/gitness/internal/api/request"
)
func HandleUpdate(executionCtrl *execution.Controller) http.HandlerFunc {
func HandleCancel(executionCtrl *execution.Controller) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
session, _ := request.AuthSessionFrom(ctx)
in := new(execution.UpdateInput)
err := json.NewDecoder(r.Body).Decode(in)
if err != nil {
render.BadRequestf(w, "Invalid Request Body: %s.", err)
return
}
pipelineUID, err := request.GetPipelineUIDFromPath(r)
if err != nil {
render.TranslatedUserError(w, err)
return
}
repoRef, err := request.GetRepoRefFromPath(r)
if err != nil {
render.TranslatedUserError(w, err)
return
}
n, err := request.GetExecutionNumberFromPath(r)
if err != nil {
render.TranslatedUserError(w, err)
return
}
pipeline, err := executionCtrl.Update(ctx, session, repoRef, pipelineUID, n, in)
repoRef, err := request.GetRepoRefFromPath(r)
if err != nil {
render.TranslatedUserError(w, err)
return
}
render.JSON(w, http.StatusOK, pipeline)
execution, err := executionCtrl.Cancel(ctx, session, repoRef, pipelineUID, n)
if err != nil {
render.TranslatedUserError(w, err)
return
}
render.JSON(w, http.StatusOK, execution)
}
}


@ -15,9 +15,9 @@ import (
"github.com/rs/zerolog/log"
)
// HandleEventsStream returns an http.HandlerFunc that watches for
// HandleEvents returns an http.HandlerFunc that watches for
// events on a space
func HandleEventsStream(spaceCtrl *space.Controller) http.HandlerFunc {
func HandleEvents(spaceCtrl *space.Controller) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
session, _ := request.AuthSessionFrom(ctx)


@ -7,7 +7,6 @@ package openapi
import (
"net/http"
"github.com/harness/gitness/internal/api/controller/execution"
"github.com/harness/gitness/internal/api/controller/pipeline"
"github.com/harness/gitness/internal/api/controller/trigger"
"github.com/harness/gitness/internal/api/request"
@ -66,11 +65,6 @@ type getPipelineRequest struct {
pipelineRequest
}
type updateExecutionRequest struct {
executionRequest
execution.UpdateInput
}
type updateTriggerRequest struct {
triggerRequest
trigger.UpdateInput
@ -193,6 +187,18 @@ func pipelineOperations(reflector *openapi3.Reflector) {
_ = reflector.Spec.AddOperation(http.MethodGet,
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionFind)
executionCancel := openapi3.Operation{}
executionCancel.WithTags("pipeline")
executionCancel.WithMapOfAnything(map[string]interface{}{"operationId": "cancelExecution"})
_ = reflector.SetRequest(&executionCancel, new(getExecutionRequest), http.MethodPost)
_ = reflector.SetJSONResponse(&executionCancel, new(types.Execution), http.StatusOK)
_ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusInternalServerError)
_ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusUnauthorized)
_ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusForbidden)
_ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusNotFound)
_ = reflector.Spec.AddOperation(http.MethodPost,
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/cancel", executionCancel)
executionDelete := openapi3.Operation{}
executionDelete.WithTags("pipeline")
executionDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteExecution"})
@ -205,19 +211,6 @@ func pipelineOperations(reflector *openapi3.Reflector) {
_ = reflector.Spec.AddOperation(http.MethodDelete,
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionDelete)
executionUpdate := openapi3.Operation{}
executionUpdate.WithTags("pipeline")
executionUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateExecution"})
_ = reflector.SetRequest(&executionUpdate, new(updateExecutionRequest), http.MethodPatch)
_ = reflector.SetJSONResponse(&executionUpdate, new(types.Execution), http.StatusOK)
_ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusBadRequest)
_ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusInternalServerError)
_ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusUnauthorized)
_ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusForbidden)
_ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusNotFound)
_ = reflector.Spec.AddOperation(http.MethodPatch,
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionUpdate)
executionList := openapi3.Operation{}
executionList.WithTags("pipeline")
executionList.WithMapOfAnything(map[string]interface{}{"operationId": "listExecutions"})
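
A rough client-side sketch of calling the new cancel route registered above. The route path and POST method come from the spec; the /api/v1 prefix, bearer-token header, host, and identifiers are assumptions for illustration only.

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"time"
)

// cancelExecution POSTs to the cancel endpoint and expects a 200 response
// carrying the cancelled execution.
func cancelExecution(ctx context.Context, baseURL, repoRef, pipelineUID string, num int64, token string) error {
	endpoint := fmt.Sprintf("%s/api/v1/repos/%s/pipelines/%s/executions/%d/cancel",
		baseURL, url.PathEscape(repoRef), url.PathEscape(pipelineUID), num)
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("cancel failed: %s", resp.Status)
	}
	return nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	_ = cancelExecution(ctx, "http://localhost:3000", "myspace/myrepo", "build", 3, "<token>")
}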


@ -0,0 +1,143 @@
package canceler
import (
"context"
"fmt"
"time"
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/rs/zerolog/log"
)
type service struct {
executionStore store.ExecutionStore
sseStreamer sse.Streamer
repoStore store.RepoStore
scheduler scheduler.Scheduler
stageStore store.StageStore
stepStore store.StepStore
}
// Canceler cancels a build.
type Canceler interface {
// Cancel cancels the provided execution.
Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error
}
// New returns a cancellation service that encapsulates
// all cancellation operations.
func New(
executionStore store.ExecutionStore,
sseStreamer sse.Streamer,
repoStore store.RepoStore,
scheduler scheduler.Scheduler,
stageStore store.StageStore,
stepStore store.StepStore,
) Canceler {
return &service{
executionStore: executionStore,
sseStreamer: sseStreamer,
repoStore: repoStore,
scheduler: scheduler,
stageStore: stageStore,
stepStore: stepStore,
}
}
func (s *service) Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error {
log := log.With().
Int64("execution.id", execution.ID).
Str("execution.status", string(execution.Status)).
Str("execution.Ref", execution.Ref).
Logger()
// do not cancel the build if the build status is
// complete. only cancel the build if the status is
// running or pending.
switch execution.Status {
case enum.CIStatusPending, enum.CIStatusRunning:
default:
return nil
}
// update the build status to killed. if the update fails
// due to an optimistic lock error it means the build has
// already started, and should now be ignored.
now := time.Now().UnixMilli()
execution.Status = enum.CIStatusKilled
execution.Finished = now
if execution.Started == 0 {
execution.Started = now
}
err := s.executionStore.Update(ctx, execution)
if err != nil {
return fmt.Errorf("could not update execution status to canceled: %w", err)
}
stages, err := s.stageStore.ListWithSteps(ctx, execution.ID)
if err != nil {
return fmt.Errorf("could not list stages with steps: %w", err)
}
// update the status of all stages to indicate they
// were killed or skipped.
for _, stage := range stages {
if stage.Status.IsDone() {
continue
}
if stage.Started != 0 {
stage.Status = enum.CIStatusKilled
} else {
stage.Status = enum.CIStatusSkipped
stage.Started = now
}
stage.Stopped = now
err := s.stageStore.Update(ctx, stage)
if err != nil {
log.Debug().Err(err).
Int64("stage.number", stage.Number).
Msg("canceler: cannot update stage status")
}
// update the status of all steps to indicate they
// were killed or skipped.
for _, step := range stage.Steps {
if step.Status.IsDone() {
continue
}
if step.Started != 0 {
step.Status = enum.CIStatusKilled
} else {
step.Status = enum.CIStatusSkipped
step.Started = now
}
step.Stopped = now
step.ExitCode = 130
err := s.stepStore.Update(ctx, step)
if err != nil {
log.Debug().Err(err).
Int64("stage.number", stage.Number).
Int64("step.number", step.Number).
Msg("canceler: cannot update step status")
}
}
}
execution.Stages = stages
log.Info().Msg("canceler: successfully cancelled build")
// trigger an SSE event to notify subscribers that
// the execution was cancelled.
err = s.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeExecutionCanceled, execution)
if err != nil {
log.Debug().Err(err).Msg("canceler: failed to publish server-sent event")
}
return nil
}


@ -0,0 +1,29 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package canceler
import (
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/google/wire"
)
// WireSet provides a wire set for this package.
var WireSet = wire.NewSet(
ProvideCanceler,
)
// ProvideCanceler provides a canceler.
func ProvideCanceler(
executionStore store.ExecutionStore,
sseStreamer sse.Streamer,
repoStore store.RepoStore,
scheduler scheduler.Scheduler,
stageStore store.StageStore,
stepStore store.StepStore) Canceler {
return New(executionStore, sseStreamer, repoStore, scheduler, stageStore, stepStore)
}


@ -0,0 +1,57 @@
package checks
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
)
// Write is a util function which writes execution and pipeline state to the
// check store.
func Write(
ctx context.Context,
checkStore store.CheckStore,
execution *types.Execution,
pipeline *types.Pipeline,
) error {
payload := types.CheckPayloadInternal{
Number: execution.Number,
RepoID: execution.RepoID,
PipelineID: execution.PipelineID,
}
data, err := json.Marshal(payload)
if err != nil {
return fmt.Errorf("could not marshal check payload: %w", err)
}
now := time.Now().UnixMilli()
summary := pipeline.Description
if summary == "" {
summary = pipeline.UID
}
check := &types.Check{
RepoID: execution.RepoID,
UID: pipeline.UID,
Summary: summary,
Created: now,
Updated: now,
CreatedBy: execution.CreatedBy,
Status: execution.Status.ConvertToCheckStatus(),
CommitSHA: execution.After,
Metadata: []byte("{}"),
Payload: types.CheckPayload{
Version: "1",
Kind: enum.CheckPayloadKindPipeline,
Data: data,
},
}
err = checkStore.Upsert(ctx, check)
if err != nil {
return fmt.Errorf("could not upsert to check store: %w", err)
}
return nil
}
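
For illustration, the payload data written by checks.Write above marshals roughly as in the sketch below; the struct is redefined locally and the JSON tag names are assumptions, only the field set comes from the diff.

package main

import (
	"encoding/json"
	"fmt"
)

// checkPayloadInternal mirrors the fields of types.CheckPayloadInternal used above;
// the json tags here are illustrative, not copied from the codebase.
type checkPayloadInternal struct {
	Number     int64 `json:"execution_number"`
	RepoID     int64 `json:"repo_id"`
	PipelineID int64 `json:"pipeline_id"`
}

func main() {
	data, err := json.Marshal(checkPayloadInternal{Number: 7, RepoID: 1, PipelineID: 2})
	if err != nil {
		panic(err)
	}
	// e.g. {"execution_number":7,"repo_id":1,"pipeline_id":2}
	fmt.Println(string(data))
}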


@ -1,83 +0,0 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package events
import (
"context"
"encoding/json"
"strconv"
"github.com/harness/gitness/pubsub"
"github.com/harness/gitness/types/enum"
)
// Event is an event which is sent to the UI via server-sent events.
type Event struct {
Type enum.EventType `json:"type"`
Data json.RawMessage `json:"data"`
}
type EventsStreamer interface {
// Publish publishes an event to a given space ID.
Publish(ctx context.Context, spaceID int64, event *Event) error
// Subscribe listens to events on a space ID.
Subscribe(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, pubsub.Consumer)
// Unsubscribe unsubscribes the consumer.
Unsubscribe(ctx context.Context, consumer pubsub.Consumer) error
}
type event struct {
pubsub pubsub.PubSub
topic string
}
func New(pubsub pubsub.PubSub, topic string) EventsStreamer {
return &event{
pubsub: pubsub,
topic: topic,
}
}
func (e *event) Publish(ctx context.Context, spaceID int64, event *Event) error {
bytes, err := json.Marshal(event)
if err != nil {
return err
}
option := pubsub.WithPublishNamespace(format(spaceID))
return e.pubsub.Publish(ctx, e.topic, bytes, option)
}
// format creates the namespace name which will be spaces-<id>
func format(id int64) string {
return "spaces-" + strconv.Itoa(int(id))
}
func (e *event) Subscribe(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, pubsub.Consumer) {
chEvent := make(chan *Event, 100) // TODO: check best size here
chErr := make(chan error)
g := func(payload []byte) error {
event := &Event{}
err := json.Unmarshal(payload, event)
if err != nil {
// This should never happen
return err
}
select {
case chEvent <- event:
default:
}
return nil
}
option := pubsub.WithChannelNamespace(format(spaceID))
consumer := e.pubsub.Subscribe(ctx, e.topic, g, option)
return chEvent, chErr, consumer
}
func (e *event) Unsubscribe(ctx context.Context, consumer pubsub.Consumer) error {
return consumer.Unsubscribe(ctx, e.topic)
}


@ -7,7 +7,7 @@ package file
import (
"context"
"fmt"
"io/ioutil"
"io"
"github.com/harness/gitness/gitrpc"
"github.com/harness/gitness/types"
@ -37,7 +37,7 @@ func (f *service) Get(
IncludeLatestCommit: false,
})
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to read tree node: %w", err)
}
// viewing Raw content is only supported for blob content
if treeNodeOutput.Node.Type != gitrpc.TreeNodeTypeBlob {
@ -53,7 +53,7 @@ func (f *service) Get(
return nil, fmt.Errorf("failed to read blob from gitrpc: %w", err)
}
buf, err := ioutil.ReadAll(blobReader.Content)
buf, err := io.ReadAll(blobReader.Content)
if err != nil {
return nil, fmt.Errorf("could not read blob content from file: %w", err)
}


@ -8,8 +8,6 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"github.com/harness/gitness/livelog"
"github.com/harness/gitness/types"
@ -102,7 +100,8 @@ func (e *embedded) Detail(ctx context.Context, stage *drone.Stage) (*client.Cont
func (e *embedded) Update(ctx context.Context, stage *drone.Stage) error {
var err error
convertedStage := convertFromDroneStage(stage)
if stage.Status == enum.CIStatusPending || stage.Status == enum.CIStatusRunning {
status := enum.ParseCIStatus(stage.Status)
if status == enum.CIStatusPending || status == enum.CIStatusRunning {
err = e.manager.BeforeStage(ctx, convertedStage)
} else {
err = e.manager.AfterStage(ctx, convertedStage)
@ -115,7 +114,8 @@ func (e *embedded) Update(ctx context.Context, stage *drone.Stage) error {
func (e *embedded) UpdateStep(ctx context.Context, step *drone.Step) error {
var err error
convertedStep := convertFromDroneStep(step)
if step.Status == enum.CIStatusPending || step.Status == enum.CIStatusRunning {
status := enum.ParseCIStatus(step.Status)
if status == enum.CIStatusPending || status == enum.CIStatusRunning {
err = e.manager.BeforeStep(ctx, convertedStep)
} else {
err = e.manager.AfterStep(ctx, convertedStep)
@ -125,15 +125,13 @@ func (e *embedded) UpdateStep(ctx context.Context, step *drone.Step) error {
}
// Watch watches for build cancellation requests.
func (e *embedded) Watch(ctx context.Context, stage int64) (bool, error) {
// Implement Watch logic here
return false, errors.New("Not implemented")
func (e *embedded) Watch(ctx context.Context, executionID int64) (bool, error) {
return e.manager.Watch(ctx, executionID)
}
// Batch batch writes logs to the streaming logs.
func (e *embedded) Batch(ctx context.Context, step int64, lines []*drone.Line) error {
for _, l := range lines {
fmt.Println("line is: ", l)
line := convertFromDroneLine(l)
err := e.manager.Write(ctx, step, line)
if err != nil {


@ -10,6 +10,7 @@ import (
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/livelog"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/drone/drone-go/drone"
"github.com/drone/runner-go/client"
@ -23,7 +24,7 @@ func convertToDroneStage(stage *types.Stage) *drone.Stage {
Name: stage.Name,
Kind: stage.Kind,
Type: stage.Type,
Status: stage.Status,
Status: string(stage.Status),
Error: stage.Error,
ErrIgnore: stage.ErrIgnore,
ExitCode: stage.ExitCode,
@ -34,10 +35,10 @@ func convertToDroneStage(stage *types.Stage) *drone.Stage {
Kernel: stage.Kernel,
Limit: stage.Limit,
LimitRepo: stage.LimitRepo,
Started: stage.Started,
Stopped: stage.Stopped,
Created: stage.Created,
Updated: stage.Updated,
Started: stage.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Stopped: stage.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Created: stage.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Updated: stage.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Version: stage.Version,
OnSuccess: stage.OnSuccess,
OnFailure: stage.OnFailure,
@ -61,12 +62,12 @@ func convertToDroneStep(step *types.Step) *drone.Step {
StageID: step.StageID,
Number: int(step.Number),
Name: step.Name,
Status: step.Status,
Status: string(step.Status),
Error: step.Error,
ErrIgnore: step.ErrIgnore,
ExitCode: step.ExitCode,
Started: step.Started,
Stopped: step.Stopped,
Started: step.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Stopped: step.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Version: step.Version,
DependsOn: step.DependsOn,
Image: step.Image,
@ -81,12 +82,12 @@ func convertFromDroneStep(step *drone.Step) *types.Step {
StageID: step.StageID,
Number: int64(step.Number),
Name: step.Name,
Status: step.Status,
Status: enum.ParseCIStatus(step.Status),
Error: step.Error,
ErrIgnore: step.ErrIgnore,
ExitCode: step.ExitCode,
Started: step.Started,
Stopped: step.Stopped,
Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Stopped: step.Stopped * 1e3,
Version: step.Version,
DependsOn: step.DependsOn,
Image: step.Image,
@ -103,12 +104,12 @@ func convertFromDroneSteps(steps []*drone.Step) []*types.Step {
StageID: step.StageID,
Number: int64(step.Number),
Name: step.Name,
Status: step.Status,
Status: enum.ParseCIStatus(step.Status),
Error: step.Error,
ErrIgnore: step.ErrIgnore,
ExitCode: step.ExitCode,
Started: step.Started,
Stopped: step.Stopped,
Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Stopped: step.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Version: step.Version,
DependsOn: step.DependsOn,
Image: step.Image,
@ -127,7 +128,7 @@ func convertFromDroneStage(stage *drone.Stage) *types.Stage {
Name: stage.Name,
Kind: stage.Kind,
Type: stage.Type,
Status: stage.Status,
Status: enum.ParseCIStatus(stage.Status),
Error: stage.Error,
ErrIgnore: stage.ErrIgnore,
ExitCode: stage.ExitCode,
@ -138,8 +139,8 @@ func convertFromDroneStage(stage *drone.Stage) *types.Stage {
Kernel: stage.Kernel,
Limit: stage.Limit,
LimitRepo: stage.LimitRepo,
Started: stage.Started,
Stopped: stage.Stopped,
Started: stage.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Stopped: stage.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Version: stage.Version,
OnSuccess: stage.OnSuccess,
OnFailure: stage.OnFailure,
@ -164,7 +165,7 @@ func convertToDroneBuild(execution *types.Execution) *drone.Build {
Trigger: execution.Trigger,
Number: execution.Number,
Parent: execution.Parent,
Status: execution.Status,
Status: string(execution.Status),
Error: execution.Error,
Event: execution.Event,
Action: execution.Action,
@ -188,10 +189,10 @@ func convertToDroneBuild(execution *types.Execution) *drone.Build {
Deploy: execution.Deploy,
DeployID: execution.DeployID,
Debug: execution.Debug,
Started: execution.Started,
Finished: execution.Finished,
Created: execution.Created,
Updated: execution.Updated,
Started: execution.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Finished: execution.Finished / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Created: execution.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Updated: execution.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli()
Version: execution.Version,
}
}
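
The repeated /1e3 and *1e3 conversions above exist because drone-go works with second-resolution Unix() timestamps while gitness stores UnixMilli() values. A minimal sketch of the round trip (values are arbitrary):

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()

	millis := now.UnixMilli() // what gitness stores
	seconds := millis / 1e3   // what drone-go expects
	back := seconds * 1e3     // converting back drops the sub-second part

	fmt.Println(millis, seconds, back)
}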


@ -10,9 +10,9 @@ import (
"fmt"
"io"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
urlprovider "github.com/harness/gitness/internal/url"
"github.com/harness/gitness/livelog"
@ -63,6 +63,9 @@ type (
// Request requests the next available build stage for execution.
Request(ctx context.Context, args *Request) (*types.Stage, error)
// Watch watches for build cancellation requests.
Watch(ctx context.Context, executionID int64) (bool, error)
// Accept accepts the build stage for execution.
Accept(ctx context.Context, stage int64, machine string) (*types.Stage, error)
@ -97,8 +100,9 @@ type Manager struct {
FileService file.FileService
Pipelines store.PipelineStore
urlProvider *urlprovider.Provider
Checks store.CheckStore
// Converter store.ConvertService
Events events.EventsStreamer
SSEStreamer sse.Streamer
// Globals store.GlobalSecretStore
Logs store.LogStore
Logz livelog.LogStream
@ -119,10 +123,11 @@ func New(
executionStore store.ExecutionStore,
pipelineStore store.PipelineStore,
urlProvider *urlprovider.Provider,
events events.EventsStreamer,
sseStreamer sse.Streamer,
fileService file.FileService,
logStore store.LogStore,
logStream livelog.LogStream,
checkStore store.CheckStore,
repoStore store.RepoStore,
scheduler scheduler.Scheduler,
secretStore store.SecretStore,
@ -135,10 +140,11 @@ func New(
Executions: executionStore,
Pipelines: pipelineStore,
urlProvider: urlProvider,
Events: events,
SSEStreamer: sseStreamer,
FileService: fileService,
Logs: logStore,
Logz: logStream,
Checks: checkStore,
Repos: repoStore,
Scheduler: scheduler,
Secrets: secretStore,
@ -300,7 +306,7 @@ func (m *Manager) Details(ctx context.Context, stageID int64) (*ExecutionContext
// Before signals the build step is about to start.
func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error {
log := log.With().
Str("step.status", step.Status).
Str("step.status", string(step.Status)).
Str("step.name", step.Name).
Int64("step.id", step.ID).
Logger()
@ -313,11 +319,11 @@ func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error {
return err
}
updater := &updater{
Executions: m.Executions,
Events: m.Events,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
Executions: m.Executions,
SSEStreamer: m.SSEStreamer,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
}
return updater.do(noContext, step)
}
@ -325,7 +331,7 @@ func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error {
// After signals the build step is complete.
func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error {
log := log.With().
Str("step.status", step.Status).
Str("step.status", string(step.Status)).
Str("step.name", step.Name).
Int64("step.id", step.ID).
Logger()
@ -333,11 +339,11 @@ func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error {
var retErr error
updater := &updater{
Executions: m.Executions,
Events: m.Events,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
Executions: m.Executions,
SSEStreamer: m.SSEStreamer,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
}
if err := updater.do(noContext, step); err != nil {
@ -354,12 +360,14 @@ func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error {
// BeforeAll signals the build stage is about to start.
func (m *Manager) BeforeStage(ctx context.Context, stage *types.Stage) error {
s := &setup{
Executions: m.Executions,
Events: m.Events,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
Users: m.Users,
Executions: m.Executions,
Checks: m.Checks,
Pipelines: m.Pipelines,
SSEStreamer: m.SSEStreamer,
Repos: m.Repos,
Steps: m.Steps,
Stages: m.Stages,
Users: m.Users,
}
return s.do(noContext, stage)
@ -368,13 +376,48 @@ func (m *Manager) BeforeStage(ctx context.Context, stage *types.Stage) error {
// AfterAll signals the build stage is complete.
func (m *Manager) AfterStage(ctx context.Context, stage *types.Stage) error {
t := &teardown{
Executions: m.Executions,
Events: m.Events,
Logs: m.Logz,
Repos: m.Repos,
Scheduler: m.Scheduler,
Steps: m.Steps,
Stages: m.Stages,
Executions: m.Executions,
Pipelines: m.Pipelines,
Checks: m.Checks,
SSEStreamer: m.SSEStreamer,
Logs: m.Logz,
Repos: m.Repos,
Scheduler: m.Scheduler,
Steps: m.Steps,
Stages: m.Stages,
}
return t.do(noContext, stage)
}
// Watch watches for build cancellation requests.
func (m *Manager) Watch(ctx context.Context, executionID int64) (bool, error) {
ok, err := m.Scheduler.Cancelled(ctx, executionID)
// we expect a context cancel error here which
// indicates a polling timeout. The subscribing
// client should look for the context cancel error
// and resume polling.
if err != nil {
return ok, err
}
// // TODO: we should be able to return
// // immediately if Cancelled returns true. This requires
// // some more testing but would avoid the extra database
// // call.
// if ok {
// return ok, err
// }
// if no error is returned we should check
// the database to see if the build is complete. If
// complete, return true.
execution, err := m.Executions.Find(ctx, executionID)
if err != nil {
log := log.With().
Int64("execution.id", executionID).
Logger()
log.Warn().Msg("manager: cannot find build")
return ok, fmt.Errorf("could not find build for cancellation: %w", err)
}
return execution.Status.IsDone(), nil
}
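
On the runner side, the comment in Watch above implies a long-poll loop roughly like the following sketch; the watcher interface, timeout, and fake implementation are assumptions made only to keep the example self-contained.

package main

import (
	"context"
	"errors"
	"log"
	"time"
)

// watcher abstracts the Watch call shown above.
type watcher interface {
	Watch(ctx context.Context, executionID int64) (bool, error)
}

// waitForCancel long-polls Watch until the build is done or cancelled;
// a context deadline on a single poll simply restarts the poll.
func waitForCancel(ctx context.Context, w watcher, executionID int64) {
	for ctx.Err() == nil {
		pollCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
		done, err := w.Watch(pollCtx, executionID)
		cancel()
		switch {
		case done:
			return
		case errors.Is(err, context.DeadlineExceeded), errors.Is(err, context.Canceled):
			continue // polling timeout, resume polling
		case err != nil:
			log.Printf("watch failed: %v", err)
			return
		}
	}
}

type fakeWatcher struct{ polls int }

func (f *fakeWatcher) Watch(ctx context.Context, executionID int64) (bool, error) {
	f.polls++
	if f.polls >= 2 {
		return true, nil
	}
	return false, context.DeadlineExceeded
}

func main() {
	waitForCancel(context.Background(), &fakeWatcher{}, 42)
}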


@ -6,11 +6,11 @@ package manager
import (
"context"
"encoding/json"
"errors"
"time"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/checks"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
gitness_store "github.com/harness/gitness/store"
"github.com/harness/gitness/types"
@ -20,12 +20,14 @@ import (
)
type setup struct {
Executions store.ExecutionStore
Events events.EventsStreamer
Repos store.RepoStore
Steps store.StepStore
Stages store.StageStore
Users store.PrincipalStore
Executions store.ExecutionStore
Checks store.CheckStore
SSEStreamer sse.Streamer
Pipelines store.PipelineStore
Repos store.RepoStore
Steps store.StepStore
Stages store.StageStore
Users store.PrincipalStore
}
func (s *setup) do(ctx context.Context, stage *types.Stage) error {
@ -54,7 +56,7 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error {
err = s.Stages.Update(noContext, stage)
if err != nil {
log.Error().Err(err).
Str("stage.status", stage.Status).
Str("stage.status", string(stage.Status)).
Msg("manager: cannot update the stage")
return err
}
@ -67,7 +69,7 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error {
err := s.Steps.Create(noContext, step)
if err != nil {
log.Error().Err(err).
Str("stage.status", stage.Status).
Str("stage.status", string(stage.Status)).
Str("step.name", step.Name).
Int64("step.id", step.ID).
Msg("manager: cannot persist the step")
@ -80,13 +82,23 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error {
log.Error().Err(err).Msg("manager: cannot update the execution")
return err
}
pipeline, err := s.Pipelines.Find(ctx, execution.PipelineID)
if err != nil {
log.Error().Err(err).Msg("manager: cannot find pipeline")
return err
}
// try to write to the checks store - if it fails, log an error and continue
err = checks.Write(ctx, s.Checks, execution, pipeline)
if err != nil {
log.Error().Err(err).Msg("manager: could not write to checks store")
}
stages, err := s.Stages.ListWithSteps(noContext, execution.ID)
if err != nil {
log.Error().Err(err).Msg("manager: could not list stages with steps")
return err
}
execution.Stages = stages
err = s.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionRunning, execution))
err = s.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionRunning, execution)
if err != nil {
log.Warn().Err(err).Msg("manager: could not publish execution event")
}
@ -94,18 +106,6 @@ func (s *setup) do(ctx context.Context, stage *types.Stage) error {
return nil
}
func executionEvent(
eventType enum.EventType,
execution *types.Execution,
) *events.Event {
// json.Marshal will not return an error here, it can be absorbed
bytes, _ := json.Marshal(execution)
return &events.Event{
Type: eventType,
Data: bytes,
}
}
// helper function that updates the execution status from pending to running.
// This accounts for the fact that another agent may have already updated
// the execution status, which may happen if two stages execute concurrently.


@ -8,8 +8,9 @@ import (
"context"
"time"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/checks"
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/livelog"
gitness_store "github.com/harness/gitness/store"
@ -20,13 +21,15 @@ import (
)
type teardown struct {
Executions store.ExecutionStore
Events events.EventsStreamer
Logs livelog.LogStream
Scheduler scheduler.Scheduler
Repos store.RepoStore
Steps store.StepStore
Stages store.StageStore
Executions store.ExecutionStore
Checks store.CheckStore
Pipelines store.PipelineStore
SSEStreamer sse.Streamer
Logs livelog.LogStream
Scheduler scheduler.Scheduler
Repos store.RepoStore
Steps store.StepStore
Stages store.StageStore
}
func (t *teardown) do(ctx context.Context, stage *types.Stage) error {
@ -45,7 +48,7 @@ func (t *teardown) do(ctx context.Context, stage *types.Stage) error {
Int64("execution.number", execution.Number).
Int64("execution.id", execution.ID).
Int64("repo.id", execution.RepoID).
Str("stage.status", stage.Status).
Str("stage.status", string(stage.Status)).
Logger()
repo, err := t.Repos.Find(noContext, execution.RepoID)
@ -134,12 +137,23 @@ func (t *teardown) do(ctx context.Context, stage *types.Stage) error {
}
execution.Stages = stages
err = t.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionCompleted, execution))
err = t.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionCompleted, execution)
if err != nil {
log.Warn().Err(err).
Msg("manager: could not publish execution completed event")
}
pipeline, err := t.Pipelines.Find(ctx, execution.PipelineID)
if err != nil {
log.Error().Err(err).Msg("manager: cannot find pipeline")
return err
}
// try to write to the checks store - if not, log an error and continue
err = checks.Write(ctx, t.Checks, execution, pipeline)
if err != nil {
log.Error().Err(err).Msg("manager: could not write to checks store")
}
return nil
}

View File

@ -7,7 +7,7 @@ package manager
import (
"context"
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
@ -16,17 +16,17 @@ import (
)
type updater struct {
Executions store.ExecutionStore
Repos store.RepoStore
Events events.EventsStreamer
Steps store.StepStore
Stages store.StageStore
Executions store.ExecutionStore
Repos store.RepoStore
SSEStreamer sse.Streamer
Steps store.StepStore
Stages store.StageStore
}
func (u *updater) do(ctx context.Context, step *types.Step) error {
log := log.With().
Str("step.name", step.Name).
Str("step.status", step.Status).
Str("step.status", string(step.Status)).
Int64("step.id", step.ID).
Logger()
@ -64,7 +64,7 @@ func (u *updater) do(ctx context.Context, step *types.Step) error {
}
execution.Stages = stages
err = u.Events.Publish(noContext, repo.ParentID, executionEvent(enum.ExecutionUpdated, execution))
err = u.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionUpdated, execution)
if err != nil {
log.Warn().Err(err).Msg("manager: cannot publish execution updated event")
}

View File

@ -5,9 +5,9 @@
package manager
import (
"github.com/harness/gitness/internal/pipeline/events"
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/url"
"github.com/harness/gitness/livelog"
@ -29,18 +29,19 @@ func ProvideExecutionManager(
executionStore store.ExecutionStore,
pipelineStore store.PipelineStore,
urlProvider *url.Provider,
events events.EventsStreamer,
sseStreamer sse.Streamer,
fileService file.FileService,
logStore store.LogStore,
logStream livelog.LogStream,
checkStore store.CheckStore,
repoStore store.RepoStore,
scheduler scheduler.Scheduler,
secretStore store.SecretStore,
stageStore store.StageStore,
stepStore store.StepStore,
userStore store.PrincipalStore) ExecutionManager {
return New(config, executionStore, pipelineStore, urlProvider, events, fileService, logStore,
logStream, repoStore, scheduler, secretStore, stageStore, stepStore, userStore)
return New(config, executionStore, pipelineStore, urlProvider, sseStreamer, fileService, logStore,
logStream, checkStore, repoStore, scheduler, secretStore, stageStore, stepStore, userStore)
}
// ProvideExecutionClient provides a client implementation to interact with the execution manager.

View File

@ -0,0 +1,80 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package scheduler
import (
"context"
"sync"
"time"
)
type canceler struct {
sync.Mutex
subscribers map[chan struct{}]int64
cancelled map[int64]time.Time
}
func newCanceler() *canceler {
return &canceler{
subscribers: make(map[chan struct{}]int64),
cancelled: make(map[int64]time.Time),
}
}
func (c *canceler) Cancel(ctx context.Context, id int64) error {
c.Lock()
defer c.Unlock()
c.cancelled[id] = time.Now().Add(time.Minute * 5)
for subscriber, build := range c.subscribers {
if id == build {
close(subscriber)
}
}
c.collect()
return nil
}
func (c *canceler) Cancelled(ctx context.Context, id int64) (bool, error) {
subscriber := make(chan struct{})
c.Lock()
c.subscribers[subscriber] = id
c.Unlock()
defer func() {
c.Lock()
delete(c.subscribers, subscriber)
c.Unlock()
}()
for {
select {
case <-ctx.Done():
return false, ctx.Err()
case <-time.After(time.Minute):
c.Lock()
_, ok := c.cancelled[id]
c.Unlock()
if ok {
return true, nil
}
case <-subscriber:
return true, nil
}
}
}
func (c *canceler) collect() {
// the list of cancelled builds is stored with a ttl, and
// is not removed until the ttl is reached. This provides an
// adequate window for clients with connectivity issues to
// reconnect and receive notification of cancel events.
now := time.Now()
for build, timestamp := range c.cancelled {
if now.After(timestamp) {
delete(c.cancelled, build)
}
}
}
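A minimal in-package test sketch (hypothetical, not part of this change) of the cancel handshake above: one goroutine blocks on Cancelled while another cancels the same build ID; the short sleep only exists to let the subscriber register first.

package scheduler

import (
	"context"
	"testing"
	"time"
)

func TestCancelUnblocksSubscriber(t *testing.T) {
	c := newCanceler()

	done := make(chan bool, 1)
	go func() {
		// blocks until the build is cancelled or the context expires
		cancelled, _ := c.Cancelled(context.Background(), 42)
		done <- cancelled
	}()

	// give the subscriber a moment to register before cancelling
	time.Sleep(10 * time.Millisecond)
	if err := c.Cancel(context.Background(), 42); err != nil {
		t.Fatalf("cancel failed: %v", err)
	}

	select {
	case cancelled := <-done:
		if !cancelled {
			t.Fatal("expected the subscriber to observe the cancellation")
		}
	case <-time.After(time.Second):
		t.Fatal("subscriber was never unblocked")
	}
}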

View File

@ -7,6 +7,8 @@ package scheduler
import (
"context"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/lock"
"github.com/harness/gitness/types"
)
@ -29,4 +31,29 @@ type Scheduler interface {
// Request requests the next stage scheduled for execution.
Request(ctx context.Context, filter Filter) (*types.Stage, error)
// Cancel cancels scheduled or running jobs associated
// with the parent build ID.
Cancel(context.Context, int64) error
// Cancelled blocks and listens for a cancellation event and
// returns true if the build has been cancelled.
Cancelled(context.Context, int64) (bool, error)
}
type scheduler struct {
*queue
*canceler
}
// newScheduler provides an instance of a scheduler with cancellation support.
func newScheduler(stageStore store.StageStore, lock lock.MutexManager) (Scheduler, error) {
q, err := newQueue(stageStore, lock)
if err != nil {
return nil, err
}
return scheduler{
q,
newCanceler(),
}, nil
}
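The composed scheduler relies on Go's struct embedding: *queue keeps the scheduling methods and *canceler contributes Cancel and Cancelled, so no forwarding code is needed. An illustrative compile-time assertion (same package as above) makes that explicit:

// compile-time check: the composed struct satisfies the full Scheduler interface
var _ Scheduler = scheduler{}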

View File

@ -21,5 +21,5 @@ func ProvideScheduler(
stageStore store.StageStore,
lock lock.MutexManager,
) (Scheduler, error) {
return newQueue(stageStore, lock)
return newScheduler(stageStore, lock)
}

View File

@ -9,6 +9,7 @@ import (
"runtime/debug"
"time"
"github.com/harness/gitness/internal/pipeline/checks"
"github.com/harness/gitness/internal/pipeline/file"
"github.com/harness/gitness/internal/pipeline/scheduler"
"github.com/harness/gitness/internal/pipeline/triggerer/dag"
@ -29,6 +30,7 @@ var _ Triggerer = (*triggerer)(nil)
type Hook struct {
Parent int64 `json:"parent"`
Trigger string `json:"trigger"`
TriggeredBy int64 `json:"triggered_by"`
Action enum.TriggerAction `json:"action"`
Link string `json:"link"`
Timestamp int64 `json:"timestamp"`
@ -59,6 +61,7 @@ type Triggerer interface {
type triggerer struct {
executionStore store.ExecutionStore
checkStore store.CheckStore
stageStore store.StageStore
db *sqlx.DB
pipelineStore store.PipelineStore
@ -69,6 +72,7 @@ type triggerer struct {
func New(
executionStore store.ExecutionStore,
checkStore store.CheckStore,
stageStore store.StageStore,
pipelineStore store.PipelineStore,
db *sqlx.DB,
@ -78,6 +82,7 @@ func New(
) *triggerer {
return &triggerer{
executionStore: executionStore,
checkStore: checkStore,
stageStore: stageStore,
scheduler: scheduler,
db: db,
@ -279,6 +284,7 @@ func (t *triggerer) Trigger(
RepoID: repo.ID,
PipelineID: pipeline.ID,
Trigger: base.Trigger,
CreatedBy: base.TriggeredBy,
Number: pipeline.Seq,
Parent: base.Parent,
Status: enum.CIStatusPending,
@ -308,8 +314,8 @@ func (t *triggerer) Trigger(
stages := make([]*types.Stage, len(matched))
for i, match := range matched {
onSuccess := match.Trigger.Status.Match(enum.CIStatusSuccess)
onFailure := match.Trigger.Status.Match(enum.CIStatusFailure)
onSuccess := match.Trigger.Status.Match(string(enum.CIStatusSuccess))
onFailure := match.Trigger.Status.Match(string(enum.CIStatusFailure))
if len(match.Trigger.Status.Include)+len(match.Trigger.Status.Exclude) == 0 {
onFailure = false
}
@ -377,6 +383,12 @@ func (t *triggerer) Trigger(
return nil, err
}
// try to write to the check store; log on failure but don't fail the execution
err = checks.Write(ctx, t.checkStore, execution, pipeline)
if err != nil {
log.Error().Err(err).Msg("trigger: could not write to check store")
}
// err = t.status.Send(ctx, user, &core.StatusInput{
// Repo: repo,
// Execution: execution,
@ -453,6 +465,7 @@ func (t *triggerer) createExecutionWithError(
execution := &types.Execution{
RepoID: pipeline.RepoID,
PipelineID: pipeline.ID,
Number: pipeline.Seq,
Parent: base.Parent,
Status: enum.CIStatusError,
@ -462,6 +475,7 @@ func (t *triggerer) createExecutionWithError(
Link: base.Link,
Title: base.Title,
Message: base.Message,
CreatedBy: base.TriggeredBy,
Before: base.Before,
After: base.After,
Ref: base.Ref,
@ -486,5 +500,11 @@ func (t *triggerer) createExecutionWithError(
return nil, err
}
// try to write to the check store; log on failure
err = checks.Write(ctx, t.checkStore, execution, pipeline)
if err != nil {
log.Error().Err(err).Msg("trigger: failed to update check")
}
return execution, nil
}

View File

@ -21,6 +21,7 @@ var WireSet = wire.NewSet(
// ProvideTriggerer provides a triggerer which can execute builds.
func ProvideTriggerer(
executionStore store.ExecutionStore,
checkStore store.CheckStore,
stageStore store.StageStore,
db *sqlx.DB,
pipelineStore store.PipelineStore,
@ -28,6 +29,6 @@ func ProvideTriggerer(
scheduler scheduler.Scheduler,
repoStore store.RepoStore,
) Triggerer {
return New(executionStore, stageStore, pipelineStore,
return New(executionStore, checkStore, stageStore, pipelineStore,
db, repoStore, scheduler, fileService)
}

View File

@ -189,7 +189,7 @@ func setupSpaces(r chi.Router, spaceCtrl *space.Controller) {
r.Patch("/", handlerspace.HandleUpdate(spaceCtrl))
r.Delete("/", handlerspace.HandleDelete(spaceCtrl))
r.Get("/stream", handlerspace.HandleEventsStream(spaceCtrl))
r.Get("/events", handlerspace.HandleEvents(spaceCtrl))
r.Post("/move", handlerspace.HandleMove(spaceCtrl))
r.Get("/spaces", handlerspace.HandleListSpaces(spaceCtrl))
@ -408,7 +408,7 @@ func setupExecutions(
r.Post("/", handlerexecution.HandleCreate(executionCtrl))
r.Route(fmt.Sprintf("/{%s}", request.PathParamExecutionNumber), func(r chi.Router) {
r.Get("/", handlerexecution.HandleFind(executionCtrl))
r.Patch("/", handlerexecution.HandleUpdate(executionCtrl))
r.Post("/cancel", handlerexecution.HandleCancel(executionCtrl))
r.Delete("/", handlerexecution.HandleDelete(executionCtrl))
r.Get(
fmt.Sprintf("/logs/{%s}/{%s}",

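For orientation, a hedged client-side sketch of the two new endpoints registered above. Only the trailing "/events" and "/cancel" segments come from these routes; the host and the path prefixes are placeholders, not taken from the diff.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// hypothetical URLs - only the trailing segments mirror the routes above
	eventsURL := "http://localhost:3000/api/v1/spaces/myspace/events"
	cancelURL := "http://localhost:3000/api/v1/repos/myrepo/pipelines/mypipe/executions/1/cancel"

	// open the server-sent events stream for a space (long-lived response body)
	if resp, err := http.Get(eventsURL); err == nil {
		defer resp.Body.Close()
		fmt.Println("events stream status:", resp.Status)
	}

	// cancel a running execution
	if resp, err := http.Post(cancelURL, "application/json", nil); err == nil {
		defer resp.Body.Close()
		fmt.Println("cancel status:", resp.Status)
	}
}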
View File

@ -0,0 +1,24 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package importer
import (
"strconv"
"strings"
)
const jobIDPrefix = "import-repo-"
func JobIDFromRepoID(repoID int64) string {
return jobIDPrefix + strconv.FormatInt(repoID, 10)
}
func RepoIDFromJobID(jobID string) int64 {
if !strings.HasPrefix(jobID, jobIDPrefix) {
return 0
}
repoID, _ := strconv.ParseInt(jobID[len(jobIDPrefix):], 10, 64)
return repoID
}
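A quick hypothetical test of the round trip between repo IDs and import job IDs, including the zero value returned for job IDs that don't carry the prefix:

package importer

import "testing"

func TestJobIDRoundTrip(t *testing.T) {
	if got := JobIDFromRepoID(42); got != "import-repo-42" {
		t.Fatalf("unexpected job ID: %s", got)
	}
	if got := RepoIDFromJobID("import-repo-42"); got != 42 {
		t.Fatalf("unexpected repo ID: %d", got)
	}
	// IDs without the prefix map to 0, signalling "not an import job"
	if got := RepoIDFromJobID("webhook-retry-7"); got != 0 {
		t.Fatalf("expected 0 for a non-import job ID, got %d", got)
	}
}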

View File

@ -6,7 +6,8 @@ package importer
import (
"context"
"errors"
"crypto/sha512"
"encoding/base32"
"fmt"
"net/http"
"time"
@ -55,31 +56,35 @@ func (r *RepositoryInfo) ToRepo(
path string,
uid string,
description string,
jobUID string,
principal *types.Principal,
) *types.Repository {
now := time.Now().UnixMilli()
gitTempUID := "importing-" + jobUID
gitTempUID := fmt.Sprintf("importing-%s-%d", hash(path), now)
return &types.Repository{
Version: 0,
ParentID: spaceID,
UID: uid,
GitUID: gitTempUID, // the correct git UID will be set by the job handler
Path: path,
Description: description,
IsPublic: r.IsPublic,
CreatedBy: principal.ID,
Created: now,
Updated: now,
ForkID: 0,
DefaultBranch: r.DefaultBranch,
Importing: true,
ImportingJobUID: &jobUID,
Version: 0,
ParentID: spaceID,
UID: uid,
GitUID: gitTempUID, // the correct git UID will be set by the job handler
Path: path,
Description: description,
IsPublic: r.IsPublic,
CreatedBy: principal.ID,
Created: now,
Updated: now,
ForkID: 0,
DefaultBranch: r.DefaultBranch,
Importing: true,
}
}
func getClient(provider Provider) (*scm.Client, error) {
if provider.Username == "" || provider.Password == "" {
func hash(s string) string {
h := sha512.New()
_, _ = h.Write([]byte(s))
return base32.StdEncoding.EncodeToString(h.Sum(nil)[:10])
}
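A small sanity-check sketch (hypothetical, same package) of the temporary git UID built above: sha512 of the repo path, truncated to 10 bytes and base32-encoded, which yields a fixed 16-character suffix.

package importer

import (
	"strings"
	"testing"
)

func TestImportingGitUIDShape(t *testing.T) {
	h := hash("space/my-repo")
	// 10 bytes of sha512 output encode to exactly 16 base32 characters
	if len(h) != 16 {
		t.Fatalf("expected a 16 character hash, got %d (%q)", len(h), h)
	}
	if strings.ContainsAny(h, " /") {
		t.Fatalf("hash should be path-safe, got %q", h)
	}
}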
func getClient(provider Provider, authReq bool) (*scm.Client, error) {
if authReq && (provider.Username == "" || provider.Password == "") {
return nil, usererror.BadRequest("scm provider authentication credentials missing")
}
@ -114,16 +119,18 @@ func getClient(provider Provider) (*scm.Client, error) {
return nil, usererror.BadRequestf("unsupported scm provider: %s", provider)
}
c.Client = &http.Client{
Transport: &oauth2.Transport{
Source: oauth2.StaticTokenSource(&scm.Token{Token: provider.Password}),
},
if provider.Password != "" {
c.Client = &http.Client{
Transport: &oauth2.Transport{
Source: oauth2.StaticTokenSource(&scm.Token{Token: provider.Password}),
},
}
}
return c, nil
}
func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug string) (RepositoryInfo, error) {
scmClient, err := getClient(provider)
scmClient, err := getClient(provider, false)
if err != nil {
return RepositoryInfo{}, err
}
@ -132,24 +139,9 @@ func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug
return RepositoryInfo{}, usererror.BadRequest("provider repository identifier is missing")
}
var statusCode int
scmRepo, scmResp, err := scmClient.Repositories.Find(ctx, repoSlug)
if scmResp != nil {
statusCode = scmResp.Status
}
if errors.Is(err, scm.ErrNotFound) || statusCode == http.StatusNotFound {
return RepositoryInfo{},
usererror.BadRequestf("repository %s not found at %s", repoSlug, provider.Type)
}
if errors.Is(err, scm.ErrNotAuthorized) || statusCode == http.StatusUnauthorized {
return RepositoryInfo{},
usererror.BadRequestf("bad credentials provided for %s at %s", repoSlug, provider.Type)
}
if err != nil || statusCode > 299 {
return RepositoryInfo{},
fmt.Errorf("failed to fetch repository %s from %s, status=%d: %w",
repoSlug, provider.Type, statusCode, err)
if err = convertSCMError(provider, repoSlug, scmResp, err); err != nil {
return RepositoryInfo{}, err
}
return RepositoryInfo{
@ -162,7 +154,7 @@ func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug
}
func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, spaceSlug string) ([]RepositoryInfo, error) {
scmClient, err := getClient(provider)
scmClient, err := getClient(provider, true)
if err != nil {
return nil, err
}
@ -172,27 +164,23 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s
}
const pageSize = 100
opts := scm.ListOptions{Page: 0, Size: pageSize}
opts := scm.RepoListOptions{
ListOptions: scm.ListOptions{
Page: 0,
Size: pageSize,
},
RepoSearchTerm: scm.RepoSearchTerm{
User: spaceSlug,
},
}
repos := make([]RepositoryInfo, 0)
for {
opts.Page++
var statusCode int
scmRepos, scmResp, err := scmClient.Repositories.List(ctx, opts)
if scmResp != nil {
statusCode = scmResp.Status
}
if errors.Is(err, scm.ErrNotFound) || statusCode == http.StatusNotFound {
return nil, usererror.BadRequestf("space %s not found at %s", spaceSlug, provider.Type)
}
if errors.Is(err, scm.ErrNotAuthorized) || statusCode == http.StatusUnauthorized {
return nil, usererror.BadRequestf("bad credentials provided for %s at %s", spaceSlug, provider.Type)
}
if err != nil || statusCode > 299 {
return nil, fmt.Errorf("failed to fetch space %s from %s, status=%d: %w",
spaceSlug, provider.Type, statusCode, err)
scmRepos, scmResp, err := scmClient.Repositories.ListV2(ctx, opts)
if err = convertSCMError(provider, spaceSlug, scmResp, err); err != nil {
return nil, err
}
if len(scmRepos) == 0 {
@ -200,9 +188,6 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s
}
for _, scmRepo := range scmRepos {
if scmRepo.Namespace != spaceSlug {
continue
}
repos = append(repos, RepositoryInfo{
Space: scmRepo.Namespace,
UID: scmRepo.Name,
@ -215,3 +200,34 @@ func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, s
return repos, nil
}
func convertSCMError(provider Provider, slug string, r *scm.Response, err error) error {
if err == nil {
return nil
}
if r == nil {
if provider.Host != "" {
return usererror.BadRequestf("failed to make HTTP request to %s (host=%s): %s",
provider.Type, provider.Host, err)
} else {
return usererror.BadRequestf("failed to make HTTP request to %s: %s",
provider.Type, err)
}
}
switch r.Status {
case http.StatusNotFound:
return usererror.BadRequestf("couldn't find %s at %s: %s",
slug, provider.Type, err.Error())
case http.StatusUnauthorized:
return usererror.BadRequestf("bad credentials provided for %s at %s: %s",
slug, provider.Type, err.Error())
case http.StatusForbidden:
return usererror.BadRequestf("access denied to %s at %s: %s",
slug, provider.Type, err.Error())
default:
return usererror.BadRequestf("failed to fetch %s from %s (HTTP status %d): %s",
slug, provider.Type, r.Status, err.Error())
}
}
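Two illustrative in-package checks (hypothetical) of the error translation above: a nil SCM error maps to nil, and a transport failure without a response surfaces as a user-facing bad request.

package importer

import (
	"errors"
	"testing"
)

func TestConvertSCMError(t *testing.T) {
	if err := convertSCMError(Provider{}, "octocat/hello-world", nil, nil); err != nil {
		t.Fatalf("a nil SCM error should map to nil, got %v", err)
	}
	err := convertSCMError(Provider{}, "octocat/hello-world", nil, errors.New("connection refused"))
	if err == nil {
		t.Fatal("expected a user-facing error when the HTTP request itself failed")
	}
}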

View File

@ -19,10 +19,12 @@ import (
"github.com/harness/gitness/internal/bootstrap"
"github.com/harness/gitness/internal/githook"
"github.com/harness/gitness/internal/services/job"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
gitnessurl "github.com/harness/gitness/internal/url"
gitness_store "github.com/harness/gitness/store"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/rs/zerolog/log"
)
@ -39,6 +41,7 @@ type Repository struct {
repoStore store.RepoStore
encrypter encrypt.Encrypter
scheduler *job.Scheduler
sseStreamer sse.Streamer
}
var _ job.Handler = (*Repository)(nil)
@ -58,7 +61,7 @@ func (r *Repository) Register(executor *job.Executor) error {
// Run starts a background job that imports the provided repository from the provided clone URL.
func (r *Repository) Run(ctx context.Context, provider Provider, repo *types.Repository, cloneURL string) error {
jobDef, err := r.getJobDef(*repo.ImportingJobUID, Input{
jobDef, err := r.getJobDef(JobIDFromRepoID(repo.ID), Input{
RepoID: repo.ID,
GitUser: provider.Username,
GitPass: provider.Password,
@ -90,7 +93,7 @@ func (r *Repository) RunMany(ctx context.Context,
repo := repos[k]
cloneURL := cloneURLs[k]
jobDef, err := r.getJobDef(*repo.ImportingJobUID, Input{
jobDef, err := r.getJobDef(JobIDFromRepoID(repo.ID), Input{
RepoID: repo.ID,
GitUser: provider.Username,
GitPass: provider.Password,
@ -167,16 +170,14 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo
return "", errors.New("missing git repository clone URL")
}
if input.GitUser != "" || input.GitPass != "" {
repoURL, err := url.Parse(input.CloneURL)
if err != nil {
return "", fmt.Errorf("failed to parse git clone URL: %w", err)
}
repoURL.User = url.UserPassword(input.GitUser, input.GitPass)
input.CloneURL = repoURL.String()
repoURL, err := url.Parse(input.CloneURL)
if err != nil {
return "", fmt.Errorf("failed to parse git clone URL: %w", err)
}
repoURL.User = url.UserPassword(input.GitUser, input.GitPass)
cloneURLWithAuth := repoURL.String()
repo, err := r.repoStore.Find(ctx, input.RepoID)
if err != nil {
return "", fmt.Errorf("failed to find repo by id: %w", err)
@ -186,23 +187,38 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo
return "", fmt.Errorf("repository %s is not being imported", repo.UID)
}
log := log.Ctx(ctx).With().
Int64("repo.id", repo.ID).
Str("repo.path", repo.Path).
Logger()
log.Info().Msg("create git repository")
gitUID, err := r.createGitRepository(ctx, &systemPrincipal, repo.ID)
if err != nil {
return "", fmt.Errorf("failed to create empty git repository: %w", err)
}
log.Info().Msgf("successfully created git repository with git_uid '%s'", gitUID)
err = func() error {
repo.GitUID = gitUID
defaultBranch, err := r.syncGitRepository(ctx, &systemPrincipal, repo, input.CloneURL)
log.Info().Msg("sync repository")
defaultBranch, err := r.syncGitRepository(ctx, &systemPrincipal, repo, cloneURLWithAuth)
if err != nil {
return fmt.Errorf("failed to sync git repository from '%s': %w", input.CloneURL, err)
}
log.Info().Msgf("successfully synced repository (returned default branch: '%s')", defaultBranch)
if defaultBranch == "" {
defaultBranch = r.defaultBranch
}
log.Info().Msg("update repo in DB")
repo, err = r.repoStore.UpdateOptLock(ctx, repo, func(repo *types.Repository) error {
if !repo.Importing {
return errors.New("repository has already finished importing")
@ -221,25 +237,33 @@ func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressRepo
return nil
}()
if err != nil {
log.Error().Err(err).Msg("failed repository import - cleanup git repository")
if errDel := r.deleteGitRepository(ctx, &systemPrincipal, repo); errDel != nil {
log.Ctx(ctx).Err(err).
Str("gitUID", gitUID).
log.Warn().Err(errDel).
Msg("failed to delete git repository after failed import")
}
return "", fmt.Errorf("failed to import repository: %w", err)
}
err = r.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeRepositoryImportCompleted, repo)
if err != nil {
log.Warn().Err(err).Msg("failed to publish import completion SSE")
}
log.Info().Msg("completed repository import")
return "", nil
}
func (r *Repository) GetProgress(ctx context.Context, repo *types.Repository) (types.JobProgress, error) {
if !repo.Importing || repo.ImportingJobUID == nil || *repo.ImportingJobUID == "" {
if !repo.Importing {
// if the repo is not being imported, return state=finished
return job.DoneProgress(), nil
}
progress, err := r.scheduler.GetJobProgress(ctx, *repo.ImportingJobUID)
progress, err := r.scheduler.GetJobProgress(ctx, JobIDFromRepoID(repo.ID))
if errors.Is(err, gitness_store.ErrResourceNotFound) {
// if the job is not found return state=failed
return job.FailProgress(), nil
@ -252,11 +276,11 @@ func (r *Repository) GetProgress(ctx context.Context, repo *types.Repository) (t
}
func (r *Repository) Cancel(ctx context.Context, repo *types.Repository) error {
if repo.ImportingJobUID == nil || *repo.ImportingJobUID == "" {
if !repo.Importing {
return nil
}
err := r.scheduler.CancelJob(ctx, *repo.ImportingJobUID)
err := r.scheduler.CancelJob(ctx, JobIDFromRepoID(repo.ID))
if err != nil {
return fmt.Errorf("failed to cancel job: %w", err)
}

View File

@ -8,6 +8,7 @@ import (
"github.com/harness/gitness/encrypt"
"github.com/harness/gitness/gitrpc"
"github.com/harness/gitness/internal/services/job"
"github.com/harness/gitness/internal/sse"
"github.com/harness/gitness/internal/store"
"github.com/harness/gitness/internal/url"
"github.com/harness/gitness/types"
@ -27,6 +28,7 @@ func ProvideRepoImporter(
encrypter encrypt.Encrypter,
scheduler *job.Scheduler,
executor *job.Executor,
sseStreamer sse.Streamer,
) (*Repository, error) {
importer := &Repository{
defaultBranch: config.Git.DefaultBranch,
@ -35,6 +37,7 @@ func ProvideRepoImporter(
repoStore: repoStore,
encrypter: encrypter,
scheduler: scheduler,
sseStreamer: sseStreamer,
}
err := executor.Register(jobType, importer)

View File

@ -10,6 +10,7 @@ import (
"strings"
"github.com/harness/gitness/events"
"github.com/harness/gitness/internal/bootstrap"
gitevents "github.com/harness/gitness/internal/events/git"
"github.com/harness/gitness/internal/pipeline/triggerer"
"github.com/harness/gitness/types/enum"
@ -23,12 +24,13 @@ func ExtractBranch(ref string) string {
func (s *Service) handleEventBranchCreated(ctx context.Context,
event *events.Event[*gitevents.BranchCreatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionBranchCreated,
Ref: event.Payload.Ref,
Source: ExtractBranch(event.Payload.Ref),
Target: ExtractBranch(event.Payload.Ref),
After: event.Payload.SHA,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionBranchCreated,
Ref: event.Payload.Ref,
Source: ExtractBranch(event.Payload.Ref),
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
Target: ExtractBranch(event.Payload.Ref),
After: event.Payload.SHA,
}
err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA)
if err != nil {
@ -40,13 +42,14 @@ func (s *Service) handleEventBranchCreated(ctx context.Context,
func (s *Service) handleEventBranchUpdated(ctx context.Context,
event *events.Event[*gitevents.BranchUpdatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionBranchUpdated,
Ref: event.Payload.Ref,
Before: event.Payload.OldSHA,
After: event.Payload.NewSHA,
Source: ExtractBranch(event.Payload.Ref),
Target: ExtractBranch(event.Payload.Ref),
Trigger: enum.TriggerHook,
Action: enum.TriggerActionBranchUpdated,
Ref: event.Payload.Ref,
Before: event.Payload.OldSHA,
After: event.Payload.NewSHA,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
Source: ExtractBranch(event.Payload.Ref),
Target: ExtractBranch(event.Payload.Ref),
}
err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA)
if err != nil {

View File

@ -9,6 +9,7 @@ import (
"fmt"
"github.com/harness/gitness/events"
"github.com/harness/gitness/internal/bootstrap"
pullreqevents "github.com/harness/gitness/internal/events/pullreq"
"github.com/harness/gitness/internal/pipeline/triggerer"
"github.com/harness/gitness/types/enum"
@ -19,9 +20,10 @@ import (
func (s *Service) handleEventPullReqCreated(ctx context.Context,
event *events.Event[*pullreqevents.CreatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqCreated,
After: event.Payload.SourceSHA,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqCreated,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
After: event.Payload.SourceSHA,
}
err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID)
if err != nil {
@ -33,9 +35,10 @@ func (s *Service) handleEventPullReqCreated(ctx context.Context,
func (s *Service) handleEventPullReqReopened(ctx context.Context,
event *events.Event[*pullreqevents.ReopenedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqReopened,
After: event.Payload.SourceSHA,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqReopened,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
After: event.Payload.SourceSHA,
}
err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID)
if err != nil {
@ -47,9 +50,10 @@ func (s *Service) handleEventPullReqReopened(ctx context.Context,
func (s *Service) handleEventPullReqBranchUpdated(ctx context.Context,
event *events.Event[*pullreqevents.BranchUpdatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqBranchUpdated,
After: event.Payload.NewSHA,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionPullReqBranchUpdated,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
After: event.Payload.NewSHA,
}
err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID)
if err != nil {

View File

@ -9,6 +9,7 @@ import (
"fmt"
"github.com/harness/gitness/events"
"github.com/harness/gitness/internal/bootstrap"
gitevents "github.com/harness/gitness/internal/events/git"
"github.com/harness/gitness/internal/pipeline/triggerer"
"github.com/harness/gitness/types/enum"
@ -17,13 +18,14 @@ import (
func (s *Service) handleEventTagCreated(ctx context.Context,
event *events.Event[*gitevents.TagCreatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionTagCreated,
Ref: event.Payload.Ref,
Before: event.Payload.SHA,
After: event.Payload.SHA,
Source: event.Payload.Ref,
Target: event.Payload.Ref,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionTagCreated,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
Ref: event.Payload.Ref,
Before: event.Payload.SHA,
After: event.Payload.SHA,
Source: event.Payload.Ref,
Target: event.Payload.Ref,
}
err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA)
if err != nil {
@ -35,13 +37,14 @@ func (s *Service) handleEventTagCreated(ctx context.Context,
func (s *Service) handleEventTagUpdated(ctx context.Context,
event *events.Event[*gitevents.TagUpdatedPayload]) error {
hook := &triggerer.Hook{
Trigger: enum.TriggerHook,
Action: enum.TriggerActionTagUpdated,
Ref: event.Payload.Ref,
Before: event.Payload.OldSHA,
After: event.Payload.NewSHA,
Source: event.Payload.Ref,
Target: event.Payload.Ref,
Trigger: enum.TriggerHook,
Action: enum.TriggerActionTagUpdated,
TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID,
Ref: event.Payload.Ref,
Before: event.Payload.OldSHA,
After: event.Payload.NewSHA,
Source: event.Payload.Ref,
Target: event.Payload.Ref,
}
err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA)
if err != nil {

View File

@ -113,7 +113,8 @@ func New(
stream.WithConcurrency(config.Concurrency),
stream.WithHandlerOptions(
stream.WithIdleTimeout(idleTimeout),
stream.WithMaxRetries(config.MaxRetries),
// retries are not needed for builds that failed to trigger; this can be adjusted when needed
stream.WithMaxRetries(0),
))
_ = r.RegisterCreated(service.handleEventPullReqCreated)
@ -156,6 +157,7 @@ func (s *Service) trigger(ctx context.Context, repoID int64,
pipeline, err := s.pipelineStore.Find(ctx, t.PipelineID)
if err != nil {
errs = multierror.Append(errs, err)
continue
}
_, err = s.triggerSvc.Trigger(ctx, pipeline, hook)

internal/sse/sse.go (new file)
View File

@ -0,0 +1,96 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package sse
import (
"context"
"encoding/json"
"fmt"
"strconv"
"github.com/harness/gitness/pubsub"
"github.com/harness/gitness/types/enum"
)
// Event is a server sent event.
type Event struct {
Type enum.SSEType `json:"type"`
Data json.RawMessage `json:"data"`
}
type Streamer interface {
// Publish publishes an event to a given space ID.
Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error
// Stream streams the events for a given space ID.
Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error)
}
type pubsubStreamer struct {
pubsub pubsub.PubSub
namespace string
}
func NewStreamer(pubsub pubsub.PubSub, namespace string) Streamer {
return &pubsubStreamer{
pubsub: pubsub,
namespace: namespace,
}
}
func (e *pubsubStreamer) Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error {
dataSerialized, err := json.Marshal(data)
if err != nil {
return fmt.Errorf("failed to serialize data: %w", err)
}
event := Event{
Type: eventType,
Data: dataSerialized,
}
serializedEvent, err := json.Marshal(event)
if err != nil {
return fmt.Errorf("failed to serialize event: %w", err)
}
namespaceOption := pubsub.WithPublishNamespace(e.namespace)
topic := getSpaceTopic(spaceID)
err = e.pubsub.Publish(ctx, topic, serializedEvent, namespaceOption)
if err != nil {
return fmt.Errorf("failed to publish event on pubsub: %w", err)
}
return nil
}
func (e *pubsubStreamer) Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error) {
chEvent := make(chan *Event, 100) // TODO: check best size here
chErr := make(chan error)
g := func(payload []byte) error {
event := &Event{}
err := json.Unmarshal(payload, event)
if err != nil {
// This should never happen
return err
}
select {
case chEvent <- event:
default:
}
return nil
}
namespaceOption := pubsub.WithChannelNamespace(e.namespace)
topic := getSpaceTopic(spaceID)
consumer := e.pubsub.Subscribe(ctx, topic, g, namespaceOption)
unsubscribeFN := func(ctx context.Context) error {
return consumer.Unsubscribe(ctx, topic)
}
return chEvent, chErr, unsubscribeFN
}
// getSpaceTopic creates the topic name for a space, which will be `spaces:<id>`
func getSpaceTopic(spaceID int64) string {
return "spaces:" + strconv.Itoa(int(spaceID))
}
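A hedged consumer sketch of the Streamer: subscribe to a space, publish one event, read it back, then unsubscribe. The payload value and the helper name are illustrative; the types come from the interfaces above.

package main

import (
	"context"
	"fmt"

	"github.com/harness/gitness/internal/sse"
	"github.com/harness/gitness/types/enum"
)

// consumeOne publishes a single event for a space and waits for it to come
// back on the stream.
func consumeOne(ctx context.Context, streamer sse.Streamer, spaceID int64) error {
	events, _, unsubscribe := streamer.Stream(ctx, spaceID)
	defer func() { _ = unsubscribe(ctx) }()

	payload := map[string]any{"id": 1, "status": "running"}
	if err := streamer.Publish(ctx, spaceID, enum.SSETypeExecutionRunning, payload); err != nil {
		return err
	}

	select {
	case ev := <-events:
		fmt.Printf("received %v: %s\n", ev.Type, string(ev.Data))
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}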

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
package events
package sse
import (
"github.com/harness/gitness/pubsub"
@ -15,9 +15,7 @@ var WireSet = wire.NewSet(
ProvideEventsStreaming,
)
func ProvideEventsStreaming(pubsub pubsub.PubSub) EventsStreamer {
return &event{
pubsub: pubsub,
topic: "events",
}
func ProvideEventsStreaming(pubsub pubsub.PubSub) Streamer {
const namespace = "sse"
return NewStreamer(pubsub, namespace)
}

View File

@ -560,10 +560,6 @@ type (
// Update tries to update an execution.
Update(ctx context.Context, execution *types.Execution) error
// UpdateOptLock updates the execution using the optimistic locking mechanism.
UpdateOptLock(ctx context.Context, execution *types.Execution,
mutateFn func(execution *types.Execution) error) (*types.Execution, error)
// List lists the executions for a given pipeline ID
List(ctx context.Context, pipelineID int64, pagination types.Pagination) ([]*types.Execution, error)

View File

@ -38,11 +38,12 @@ type executionStore struct {
type execution struct {
ID int64 `db:"execution_id"`
PipelineID int64 `db:"execution_pipeline_id"`
CreatedBy int64 `db:"execution_created_by"`
RepoID int64 `db:"execution_repo_id"`
Trigger string `db:"execution_trigger"`
Number int64 `db:"execution_number"`
Parent int64 `db:"execution_parent"`
Status string `db:"execution_status"`
Status enum.CIStatus `db:"execution_status"`
Error string `db:"execution_error"`
Event string `db:"execution_event"`
Action string `db:"execution_action"`
@ -77,6 +78,7 @@ const (
executionColumns = `
execution_id
,execution_pipeline_id
,execution_created_by
,execution_repo_id
,execution_trigger
,execution_number
@ -149,6 +151,7 @@ func (s *executionStore) Create(ctx context.Context, execution *types.Execution)
INSERT INTO executions (
execution_pipeline_id
,execution_repo_id
,execution_created_by
,execution_trigger
,execution_number
,execution_parent
@ -184,6 +187,7 @@ func (s *executionStore) Create(ctx context.Context, execution *types.Execution)
) VALUES (
:execution_pipeline_id
,:execution_repo_id
,:execution_created_by
,:execution_trigger
,:execution_number
,:execution_parent
@ -284,33 +288,6 @@ func (s *executionStore) Update(ctx context.Context, e *types.Execution) error {
return nil
}
// UpdateOptLock updates the pipeline using the optimistic locking mechanism.
func (s *executionStore) UpdateOptLock(ctx context.Context,
execution *types.Execution,
mutateFn func(execution *types.Execution) error) (*types.Execution, error) {
for {
dup := *execution
err := mutateFn(&dup)
if err != nil {
return nil, err
}
err = s.Update(ctx, &dup)
if err == nil {
return &dup, nil
}
if !errors.Is(err, gitness_store.ErrVersionConflict) {
return nil, err
}
execution, err = s.FindByNumber(ctx, execution.PipelineID, execution.Number)
if err != nil {
return nil, err
}
}
}
// List lists the executions for a given pipeline ID.
// It orders them in descending order of execution number.
func (s *executionStore) List(

View File

@ -13,6 +13,7 @@ func mapInternalToExecution(in *execution) (*types.Execution, error) {
return &types.Execution{
ID: in.ID,
PipelineID: in.PipelineID,
CreatedBy: in.CreatedBy,
RepoID: in.RepoID,
Trigger: in.Trigger,
Number: in.Number,
@ -53,6 +54,7 @@ func mapExecutionToInternal(in *types.Execution) *execution {
return &execution{
ID: in.ID,
PipelineID: in.PipelineID,
CreatedBy: in.CreatedBy,
RepoID: in.RepoID,
Trigger: in.Trigger,
Number: in.Number,

View File

@ -0,0 +1,3 @@
ALTER TABLE repositories
DROP CONSTRAINT fk_repo_importing_job_uid,
DROP COLUMN repo_importing_job_uid;

View File

@ -0,0 +1,7 @@
ALTER TABLE repositories
ADD COLUMN repo_importing_job_uid TEXT,
ADD CONSTRAINT fk_repo_importing_job_uid
FOREIGN KEY (repo_importing_job_uid)
REFERENCES jobs(job_uid)
ON DELETE SET NULL
ON UPDATE NO ACTION;

View File

@ -0,0 +1,10 @@
DROP TABLE pipelines;
DROP TABLE executions;
DROP TABLE stages;
DROP TABLE secrets;
DROP TABLE steps;
DROP TABLE logs;
DROP TABLE plugins;
DROP TABLE connectors;
DROP TABLE templates;
DROP TABLE triggers;

View File

@ -0,0 +1,206 @@
CREATE TABLE pipelines (
pipeline_id SERIAL PRIMARY KEY,
pipeline_description TEXT NOT NULL,
pipeline_uid TEXT NOT NULL,
pipeline_seq INTEGER NOT NULL DEFAULT 0,
pipeline_disabled BOOLEAN NOT NULL,
pipeline_repo_id INTEGER NOT NULL,
pipeline_default_branch TEXT NOT NULL,
pipeline_created_by INTEGER NOT NULL,
pipeline_config_path TEXT NOT NULL,
pipeline_created BIGINT NOT NULL,
pipeline_updated BIGINT NOT NULL,
pipeline_version INTEGER NOT NULL,
UNIQUE (pipeline_repo_id, pipeline_uid),
CONSTRAINT fk_pipelines_repo_id FOREIGN KEY (pipeline_repo_id)
REFERENCES repositories (repo_id) ON DELETE CASCADE,
CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by)
REFERENCES principals (principal_id) ON DELETE NO ACTION
);
CREATE TABLE executions (
execution_id SERIAL PRIMARY KEY,
execution_pipeline_id INTEGER NOT NULL,
execution_repo_id INTEGER NOT NULL,
execution_created_by INTEGER NOT NULL,
execution_trigger TEXT NOT NULL,
execution_number INTEGER NOT NULL,
execution_parent INTEGER NOT NULL,
execution_status TEXT NOT NULL,
execution_error TEXT NOT NULL,
execution_event TEXT NOT NULL,
execution_action TEXT NOT NULL,
execution_link TEXT NOT NULL,
execution_timestamp INTEGER NOT NULL,
execution_title TEXT NOT NULL,
execution_message TEXT NOT NULL,
execution_before TEXT NOT NULL,
execution_after TEXT NOT NULL,
execution_ref TEXT NOT NULL,
execution_source_repo TEXT NOT NULL,
execution_source TEXT NOT NULL,
execution_target TEXT NOT NULL,
execution_author TEXT NOT NULL,
execution_author_name TEXT NOT NULL,
execution_author_email TEXT NOT NULL,
execution_author_avatar TEXT NOT NULL,
execution_sender TEXT NOT NULL,
execution_params TEXT NOT NULL,
execution_cron TEXT NOT NULL,
execution_deploy TEXT NOT NULL,
execution_deploy_id INTEGER NOT NULL,
execution_debug BOOLEAN NOT NULL DEFAULT false,
execution_started BIGINT NOT NULL,
execution_finished BIGINT NOT NULL,
execution_created BIGINT NOT NULL,
execution_updated BIGINT NOT NULL,
execution_version INTEGER NOT NULL,
UNIQUE (execution_pipeline_id, execution_number),
CONSTRAINT fk_executions_pipeline_id FOREIGN KEY (execution_pipeline_id)
REFERENCES pipelines (pipeline_id) ON DELETE CASCADE,
CONSTRAINT fk_executions_repo_id FOREIGN KEY (execution_repo_id)
REFERENCES repositories (repo_id) ON DELETE CASCADE,
CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by)
REFERENCES principals (principal_id) ON DELETE NO ACTION
);
CREATE TABLE secrets (
secret_id SERIAL PRIMARY KEY,
secret_uid TEXT NOT NULL,
secret_space_id INTEGER NOT NULL,
secret_description TEXT NOT NULL,
secret_data BYTEA NOT NULL,
secret_created_by INTEGER NOT NULL,
secret_created BIGINT NOT NULL,
secret_updated BIGINT NOT NULL,
secret_version INTEGER NOT NULL,
UNIQUE (secret_space_id, secret_uid),
CONSTRAINT fk_secrets_space_id FOREIGN KEY (secret_space_id)
REFERENCES spaces (space_id) ON DELETE CASCADE,
CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by)
REFERENCES principals (principal_id) ON DELETE NO ACTION
);
CREATE TABLE stages (
stage_id SERIAL PRIMARY KEY,
stage_execution_id INTEGER NOT NULL,
stage_repo_id INTEGER NOT NULL,
stage_number INTEGER NOT NULL,
stage_kind TEXT NOT NULL,
stage_type TEXT NOT NULL,
stage_name TEXT NOT NULL,
stage_status TEXT NOT NULL,
stage_error TEXT NOT NULL,
stage_parent_group_id INTEGER NOT NULL,
stage_errignore BOOLEAN NOT NULL,
stage_exit_code INTEGER NOT NULL,
stage_limit INTEGER NOT NULL,
stage_os TEXT NOT NULL,
stage_arch TEXT NOT NULL,
stage_variant TEXT NOT NULL,
stage_kernel TEXT NOT NULL,
stage_machine TEXT NOT NULL,
stage_started BIGINT NOT NULL,
stage_stopped BIGINT NOT NULL,
stage_created BIGINT NOT NULL,
stage_updated BIGINT NOT NULL,
stage_version INTEGER NOT NULL,
stage_on_success BOOLEAN NOT NULL,
stage_on_failure BOOLEAN NOT NULL,
stage_depends_on TEXT NOT NULL,
stage_labels TEXT NOT NULL,
stage_limit_repo INTEGER NOT NULL DEFAULT 0,
UNIQUE (stage_execution_id, stage_number),
CONSTRAINT fk_stages_execution_id FOREIGN KEY (stage_execution_id)
REFERENCES executions (execution_id) ON DELETE CASCADE
);
CREATE INDEX ix_stage_in_progress ON stages (stage_status)
WHERE stage_status IN ('pending', 'running');
CREATE TABLE steps (
step_id SERIAL PRIMARY KEY,
step_stage_id INTEGER NOT NULL,
step_number INTEGER NOT NULL,
step_name TEXT NOT NULL,
step_status TEXT NOT NULL,
step_error TEXT NOT NULL,
step_parent_group_id INTEGER NOT NULL,
step_errignore BOOLEAN NOT NULL,
step_exit_code INTEGER NOT NULL,
step_started BIGINT NOT NULL,
step_stopped BIGINT NOT NULL,
step_version INTEGER NOT NULL,
step_depends_on TEXT NOT NULL,
step_image TEXT NOT NULL,
step_detached BOOLEAN NOT NULL,
step_schema TEXT NOT NULL,
UNIQUE (step_stage_id, step_number),
CONSTRAINT fk_steps_stage_id FOREIGN KEY (step_stage_id)
REFERENCES stages (stage_id) ON DELETE CASCADE
);
CREATE TABLE logs (
log_id SERIAL PRIMARY KEY,
log_data BYTEA NOT NULL,
CONSTRAINT fk_logs_id FOREIGN KEY (log_id)
REFERENCES steps (step_id) ON DELETE CASCADE
);
CREATE TABLE connectors (
connector_id SERIAL PRIMARY KEY,
connector_uid TEXT NOT NULL,
connector_description TEXT NOT NULL,
connector_type TEXT NOT NULL,
connector_space_id INTEGER NOT NULL,
connector_data TEXT NOT NULL,
connector_created BIGINT NOT NULL,
connector_updated BIGINT NOT NULL,
connector_version INTEGER NOT NULL,
UNIQUE (connector_space_id, connector_uid),
CONSTRAINT fk_connectors_space_id FOREIGN KEY (connector_space_id)
REFERENCES spaces (space_id) ON DELETE CASCADE
);
CREATE TABLE templates (
template_id SERIAL PRIMARY KEY,
template_uid TEXT NOT NULL,
template_description TEXT NOT NULL,
template_space_id INTEGER NOT NULL,
template_data TEXT NOT NULL,
template_created BIGINT NOT NULL,
template_updated BIGINT NOT NULL,
template_version INTEGER NOT NULL,
UNIQUE (template_space_id, template_uid),
CONSTRAINT fk_templates_space_id FOREIGN KEY (template_space_id)
REFERENCES spaces (space_id) ON DELETE CASCADE
);
CREATE TABLE triggers (
trigger_id SERIAL PRIMARY KEY,
trigger_uid TEXT NOT NULL,
trigger_pipeline_id INTEGER NOT NULL,
trigger_type TEXT NOT NULL,
trigger_repo_id INTEGER NOT NULL,
trigger_secret TEXT NOT NULL,
trigger_description TEXT NOT NULL,
trigger_disabled BOOLEAN NOT NULL,
trigger_created_by INTEGER NOT NULL,
trigger_actions TEXT NOT NULL,
trigger_created BIGINT NOT NULL,
trigger_updated BIGINT NOT NULL,
trigger_version INTEGER NOT NULL,
UNIQUE (trigger_pipeline_id, trigger_uid),
CONSTRAINT fk_triggers_pipeline_id FOREIGN KEY (trigger_pipeline_id)
REFERENCES pipelines (pipeline_id) ON DELETE CASCADE,
CONSTRAINT fk_triggers_repo_id FOREIGN KEY (trigger_repo_id)
REFERENCES repositories (repo_id) ON DELETE CASCADE
);
CREATE TABLE plugins (
plugin_uid TEXT NOT NULL,
plugin_description TEXT NOT NULL,
plugin_logo TEXT NOT NULL,
plugin_spec BYTEA NOT NULL,
UNIQUE (plugin_uid)
);

View File

@ -0,0 +1 @@
ALTER TABLE repositories ADD COLUMN repo_importing_job_uid TEXT;

View File

@ -0,0 +1 @@
ALTER TABLE repositories DROP COLUMN repo_importing_job_uid;

View File

@ -0,0 +1,10 @@
DROP TABLE pipelines;
DROP TABLE executions;
DROP TABLE stages;
DROP TABLE secrets;
DROP TABLE steps;
DROP TABLE logs;
DROP TABLE plugins;
DROP TABLE connectors;
DROP TABLE templates;
DROP TABLE triggers;

View File

@ -1,20 +1,12 @@
DROP TABLE IF exists pipelines;
DROP TABLE IF exists executions;
DROP TABLE IF exists stages;
DROP TABLE IF exists secrets;
DROP TABLE IF exists steps;
DROP TABLE IF exists logs;
DROP TABLE IF exists plugins;
DROP TABLE IF exists connectors;
DROP TABLE IF exists templates;
DROP TABLE IF exists triggers;
CREATE TABLE pipelines (
pipeline_id INTEGER PRIMARY KEY AUTOINCREMENT
,pipeline_description TEXT NOT NULL
,pipeline_uid TEXT NOT NULL
,pipeline_seq INTEGER NOT NULL DEFAULT 0
,pipeline_disabled BOOLEAN NOT NULL
,pipeline_repo_id INTEGER NOT NULL
,pipeline_default_branch TEXT NOT NULL
,pipeline_created_by INTEGER NOT NULL
,pipeline_config_path TEXT NOT NULL
,pipeline_created INTEGER NOT NULL
,pipeline_updated INTEGER NOT NULL
@ -28,12 +20,19 @@ CREATE TABLE pipelines (
REFERENCES repositories (repo_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE CASCADE
-- Foreign key to principals table
,CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by)
REFERENCES principals (principal_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
);
CREATE TABLE executions (
execution_id INTEGER PRIMARY KEY AUTOINCREMENT
,execution_pipeline_id INTEGER NOT NULL
,execution_repo_id INTEGER NOT NULL
,execution_created_by INTEGER NOT NULL
,execution_trigger TEXT NOT NULL
,execution_number INTEGER NOT NULL
,execution_parent INTEGER NOT NULL
@ -81,6 +80,12 @@ CREATE TABLE executions (
REFERENCES repositories (repo_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE CASCADE
-- Foreign key to principals table
,CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by)
REFERENCES principals (principal_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
);
CREATE TABLE secrets (
@ -92,6 +97,7 @@ CREATE TABLE secrets (
,secret_created INTEGER NOT NULL
,secret_updated INTEGER NOT NULL
,secret_version INTEGER NOT NULL
,secret_created_by INTEGER NOT NULL
-- Ensure unique combination of space ID and UID
,UNIQUE (secret_space_id, secret_uid)
@ -101,6 +107,12 @@ CREATE TABLE secrets (
REFERENCES spaces (space_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE CASCADE
-- Foreign key to principals table
,CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by)
REFERENCES principals (principal_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
);
CREATE TABLE stages (
@ -189,34 +201,6 @@ CREATE TABLE logs (
ON DELETE CASCADE
);
-- Insert some pipelines
INSERT INTO pipelines (
pipeline_id, pipeline_description, pipeline_uid, pipeline_seq,
pipeline_repo_id, pipeline_default_branch,
pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version
) VALUES (
1, 'Sample Pipeline 1', 'pipeline_uid_1', 2, 1,
'main', 'config_path_1', 1678932000, 1678932100, 1
);
INSERT INTO pipelines (
pipeline_id, pipeline_description, pipeline_uid, pipeline_seq,
pipeline_repo_id, pipeline_default_branch,
pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version
) VALUES (
2, 'Sample Pipeline 2', 'pipeline_uid_2', 0, 1,
'develop', 'config_path_2', 1678932200, 1678932300, 1
);
INSERT INTO pipelines (
pipeline_id, pipeline_description, pipeline_uid, pipeline_seq,
pipeline_repo_id, pipeline_default_branch,
pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version
) VALUES (
3, 'Sample Pipeline 3', 'pipeline_uid_3', 0, 1,
'develop', 'config_path_2', 1678932200000, 1678932300000, 1
);
CREATE TABLE connectors (
connector_id INTEGER PRIMARY KEY AUTOINCREMENT
,connector_uid TEXT NOT NULL
@ -262,10 +246,11 @@ CREATE TABLE triggers (
trigger_id INTEGER PRIMARY KEY AUTOINCREMENT
,trigger_uid TEXT NOT NULL
,trigger_pipeline_id INTEGER NOT NULL
,trigger_type TEXT NOT NULL
,trigger_repo_id INTEGER NOT NULL
,trigger_secret TEXT NOT NULL
,trigger_description TEXT NOT NULL
,trigger_enabled BOOLEAN NOT NULL
,trigger_disabled BOOLEAN NOT NULL
,trigger_created_by INTEGER NOT NULL
,trigger_actions TEXT NOT NULL
,trigger_created INTEGER NOT NULL
@ -296,20 +281,4 @@ CREATE TABLE plugins (
-- Ensure unique plugin names
,UNIQUE(plugin_uid)
);
INSERT INTO plugins (plugin_uid, plugin_description, plugin_logo, plugin_spec)
VALUES
('plugins/slack', 'A sample slack plugin', 'slack.png',
'inputs:
channel:
type: string
token:
type: string
steps:
- type: script
spec:
image: plugins/slack
envs:
PLUGIN_CHANNEL: <+ inputs.channel >');
);

View File

@ -31,6 +31,7 @@ const (
pipelineColumns = `
pipeline_id
,pipeline_description
,pipeline_created_by
,pipeline_uid
,pipeline_seq
,pipeline_repo_id
@ -87,6 +88,8 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er
,pipeline_uid
,pipeline_seq
,pipeline_repo_id
,pipeline_disabled
,pipeline_created_by
,pipeline_default_branch
,pipeline_config_path
,pipeline_created
@ -97,6 +100,8 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er
:pipeline_uid,
:pipeline_seq,
:pipeline_repo_id,
:pipeline_disabled,
:pipeline_created_by,
:pipeline_default_branch,
:pipeline_config_path,
:pipeline_created,
@ -229,7 +234,7 @@ func (s *pipelineStore) ListLatest(
`
// Create a subquery to get max execution IDs for each unique execution pipeline ID.
subquery := database.Builder.
Select("execution_pipeline_id, execution_id, MAX(execution_number)").
Select("execution_pipeline_id, MAX(execution_id) AS execution_id").
From("executions").
Where("execution_repo_id = ?").
GroupBy("execution_pipeline_id")
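For readability, a hypothetical rendering of the statement this builder now aims at; the subquery columns come from the code above, while the outer join shape is assumed.

// listLatestSketch shows the intended query: for every pipeline of the repo,
// join the row of its highest execution_id, i.e. the latest execution.
const listLatestSketch = `
SELECT p.*, e.*
FROM pipelines p
LEFT JOIN (
    SELECT execution_pipeline_id, MAX(execution_id) AS execution_id
    FROM executions
    WHERE execution_repo_id = ?
    GROUP BY execution_pipeline_id
) latest ON latest.execution_pipeline_id = p.pipeline_id
LEFT JOIN executions e ON e.execution_id = latest.execution_id
WHERE p.pipeline_repo_id = ?`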

View File

@ -8,6 +8,7 @@ import (
"database/sql"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
)
// pipelineExecutionjoin struct represents a joined row between pipelines and executions
@ -15,6 +16,7 @@ type pipelineExecutionJoin struct {
*types.Pipeline
ID sql.NullInt64 `db:"execution_id"`
PipelineID sql.NullInt64 `db:"execution_pipeline_id"`
Action sql.NullString `db:"execution_action"`
Message sql.NullString `db:"execution_message"`
After sql.NullString `db:"execution_after"`
RepoID sql.NullInt64 `db:"execution_repo_id"`
@ -56,11 +58,12 @@ func convertPipelineJoin(join *pipelineExecutionJoin) *types.Pipeline {
ID: join.ID.Int64,
PipelineID: join.PipelineID.Int64,
RepoID: join.RepoID.Int64,
Action: join.Action.String,
Trigger: join.Trigger.String,
Number: join.Number.Int64,
After: join.After.String,
Message: join.Message.String,
Status: join.Status.String,
Status: enum.ParseCIStatus(join.Status.String),
Error: join.Error.String,
Link: join.Link.String,
Timestamp: join.Timestamp.Int64,

View File

@ -59,8 +59,7 @@ const (
,repo_num_closed_pulls
,repo_num_open_pulls
,repo_num_merged_pulls
,repo_importing
,repo_importing_job_uid`
,repo_importing`
repoSelectBaseWithJoin = `
SELECT` + repoColumnsForJoin + `
@ -127,7 +126,6 @@ func (s *RepoStore) Create(ctx context.Context, repo *types.Repository) error {
,repo_num_open_pulls
,repo_num_merged_pulls
,repo_importing
,repo_importing_job_uid
) values (
:repo_version
,:repo_parent_id
@ -147,7 +145,6 @@ func (s *RepoStore) Create(ctx context.Context, repo *types.Repository) error {
,:repo_num_open_pulls
,:repo_num_merged_pulls
,:repo_importing
,:repo_importing_job_uid
) RETURNING repo_id`
db := dbtx.GetAccessor(ctx, s.db)
@ -185,7 +182,6 @@ func (s *RepoStore) Update(ctx context.Context, repo *types.Repository) error {
,repo_num_open_pulls = :repo_num_open_pulls
,repo_num_merged_pulls = :repo_num_merged_pulls
,repo_importing = :repo_importing
,repo_importing_job_uid = :repo_importing_job_uid
WHERE repo_id = :repo_id AND repo_version = :repo_version - 1`
updatedAt := time.Now()

View File

@ -31,6 +31,7 @@ const (
secret_id,
secret_description,
secret_space_id,
secret_created_by,
secret_uid,
secret_data,
secret_created,
@ -82,6 +83,7 @@ func (s *secretStore) Create(ctx context.Context, secret *types.Secret) error {
INSERT INTO secrets (
secret_description,
secret_space_id,
secret_created_by,
secret_uid,
secret_data,
secret_created,
@ -90,6 +92,7 @@ func (s *secretStore) Create(ctx context.Context, secret *types.Secret) error {
) VALUES (
:secret_description,
:secret_space_id,
:secret_created_by,
:secret_uid,
:secret_data,
:secret_created,

View File

@ -14,6 +14,7 @@ import (
"github.com/harness/gitness/store/database"
"github.com/harness/gitness/store/database/dbtx"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/jmoiron/sqlx"
sqlxtypes "github.com/jmoiron/sqlx/types"
@ -61,7 +62,7 @@ type stage struct {
Name string `db:"stage_name"`
Kind string `db:"stage_kind"`
Type string `db:"stage_type"`
Status string `db:"stage_status"`
Status enum.CIStatus `db:"stage_status"`
Error string `db:"stage_error"`
ParentGroupID int64 `db:"stage_parent_group_id"`
ErrIgnore bool `db:"stage_errignore"`
@ -265,9 +266,17 @@ func (s *stageStore) Update(ctx context.Context, st *types.Stage) error {
SET
stage_status = :stage_status
,stage_machine = :stage_machine
,stage_started = :stage_started
,stage_stopped = :stage_stopped
,stage_exit_code = :stage_exit_code
,stage_updated = :stage_updated
,stage_version = :stage_version
,stage_error = :stage_error
,stage_on_success = :stage_on_success
,stage_on_failure = :stage_on_failure
,stage_errignore = :stage_errignore
,stage_depends_on = :stage_depends_on
,stage_labels = :stage_labels
WHERE stage_id = :stage_id AND stage_version = :stage_version - 1`
updatedAt := time.Now()
steps := st.Steps

View File

@ -10,6 +10,7 @@ import (
"fmt"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
sqlxtypes "github.com/jmoiron/sqlx/types"
"github.com/pkg/errors"
@ -46,7 +47,7 @@ func convertFromNullStep(nullstep *nullstep) (*types.Step, error) {
StageID: nullstep.StageID.Int64,
Number: nullstep.Number.Int64,
Name: nullstep.Name.String,
Status: nullstep.Status.String,
Status: enum.ParseCIStatus(nullstep.Status.String),
Error: nullstep.Error.String,
ErrIgnore: nullstep.ErrIgnore.Bool,
ExitCode: int(nullstep.ExitCode.Int64),

View File

@ -13,6 +13,7 @@ import (
"github.com/harness/gitness/store/database"
"github.com/harness/gitness/store/database/dbtx"
"github.com/harness/gitness/types"
"github.com/harness/gitness/types/enum"
"github.com/jmoiron/sqlx"
sqlxtypes "github.com/jmoiron/sqlx/types"
@ -46,7 +47,7 @@ type step struct {
Number int64 `db:"step_number"`
ParentGroupID int64 `db:"step_parent_group_id"`
Name string `db:"step_name"`
Status string `db:"step_status"`
Status enum.CIStatus `db:"step_status"`
Error string `db:"step_error"`
ErrIgnore bool `db:"step_errignore"`
ExitCode int `db:"step_exit_code"`

View File

@ -29,11 +29,12 @@ type trigger struct {
ID int64 `db:"trigger_id"`
UID string `db:"trigger_uid"`
Description string `db:"trigger_description"`
Type string `db:"trigger_type"`
Secret string `db:"trigger_secret"`
PipelineID int64 `db:"trigger_pipeline_id"`
RepoID int64 `db:"trigger_repo_id"`
CreatedBy int64 `db:"trigger_created_by"`
Enabled bool `db:"trigger_enabled"`
Disabled bool `db:"trigger_disabled"`
Actions sqlxtypes.JSONText `db:"trigger_actions"`
Created int64 `db:"trigger_created"`
Updated int64 `db:"trigger_updated"`
@ -50,11 +51,12 @@ func mapInternalToTrigger(trigger *trigger) (*types.Trigger, error) {
return &types.Trigger{
ID: trigger.ID,
Description: trigger.Description,
Type: trigger.Type,
Secret: trigger.Secret,
PipelineID: trigger.PipelineID,
RepoID: trigger.RepoID,
CreatedBy: trigger.CreatedBy,
Enabled: trigger.Enabled,
Disabled: trigger.Disabled,
Actions: actions,
UID: trigger.UID,
Created: trigger.Created,
@ -80,11 +82,12 @@ func mapTriggerToInternal(t *types.Trigger) *trigger {
ID: t.ID,
UID: t.UID,
Description: t.Description,
Type: t.Type,
PipelineID: t.PipelineID,
Secret: t.Secret,
RepoID: t.RepoID,
CreatedBy: t.CreatedBy,
Enabled: t.Enabled,
Disabled: t.Disabled,
Actions: EncodeToSQLXJSON(t.Actions),
Created: t.Created,
Updated: t.Updated,
@ -107,7 +110,7 @@ const (
triggerColumns = `
trigger_id
,trigger_uid
,trigger_enabled
,trigger_disabled
,trigger_actions
,trigger_description
,trigger_pipeline_id
@ -139,7 +142,8 @@ func (s *triggerStore) Create(ctx context.Context, t *types.Trigger) error {
trigger_uid
,trigger_description
,trigger_actions
,trigger_enabled
,trigger_disabled
,trigger_type
,trigger_secret
,trigger_created_by
,trigger_pipeline_id
@ -151,7 +155,8 @@ func (s *triggerStore) Create(ctx context.Context, t *types.Trigger) error {
:trigger_uid
,:trigger_description
,:trigger_actions
,:trigger_enabled
,:trigger_disabled
,:trigger_type
,:trigger_secret
,:trigger_created_by
,:trigger_pipeline_id
@ -286,7 +291,7 @@ func (s *triggerStore) ListAllEnabled(
stmt := database.Builder.
Select(triggerColumns).
From("triggers").
Where("trigger_repo_id = ? AND trigger_enabled = true", fmt.Sprint(repoID))
Where("trigger_repo_id = ? AND trigger_disabled = false", fmt.Sprint(repoID))
sql, args, err := stmt.ToSql()
if err != nil {

View File

@ -98,9 +98,12 @@ func (r *InMemory) Publish(ctx context.Context, topic string, payload []byte, op
}
topic = formatTopic(pubConfig.app, pubConfig.namespace, topic)
wg := sync.WaitGroup{}
for _, sub := range r.registry {
if slices.Contains(sub.topics, topic) && !sub.isClosed() {
wg.Add(1)
go func(subscriber *inMemorySubscriber) {
defer wg.Done()
// timer is based on subscriber data
t := time.NewTimer(subscriber.config.sendTimeout)
defer t.Stop()
@ -118,6 +121,10 @@ func (r *InMemory) Publish(ctx context.Context, topic string, payload []byte, op
}
}
// Wait for all subscribers to complete.
// Otherwise, we might fail to notify some subscribers if the context completes first.
wg.Wait()
return nil
}
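
The change above fans out one goroutine per matching subscriber and now blocks on the WaitGroup before returning. A minimal, self-contained sketch of that pattern (illustrative names only, not the actual pubsub types in this repository):

package main

import (
	"fmt"
	"sync"
)

// publish notifies every subscriber channel in its own goroutine and waits
// for all of them before returning, so callers cannot cancel the work early.
func publish(subscribers []chan []byte, payload []byte) {
	wg := sync.WaitGroup{}
	for _, ch := range subscribers {
		wg.Add(1)
		go func(ch chan []byte) {
			defer wg.Done()
			ch <- payload // the real code also races this against a send timeout
		}(ch)
	}
	wg.Wait()
}

func main() {
	a, b := make(chan []byte, 1), make(chan []byte, 1)
	publish([]chan []byte{a, b}, []byte("event"))
	fmt.Println(len(a), len(b)) // 1 1
}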

View File

@ -53,3 +53,11 @@ type ReqCheck struct {
type CheckPayloadText struct {
Details string `json:"details"`
}
// CheckPayloadInternal is for internal use, allowing a more seamless integration of
// gitness CI status checks.
type CheckPayloadInternal struct {
Number int64 `json:"execution_number"`
RepoID int64 `json:"repo_id"`
PipelineID int64 `json:"pipeline_id"`
}
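
A small, hypothetical sketch of how this payload serializes, assuming the struct lives in the types package alongside the other check payload types shown here:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/harness/gitness/types"
)

func main() {
	// Hypothetical values; the keys come from the struct tags above.
	payload := types.CheckPayloadInternal{Number: 42, RepoID: 7, PipelineID: 3}
	data, _ := json.Marshal(payload)
	fmt.Println(string(data)) // {"execution_number":42,"repo_id":7,"pipeline_id":3}
}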

View File

@ -44,10 +44,12 @@ const (
CheckPayloadKindEmpty CheckPayloadKind = ""
CheckPayloadKindRaw CheckPayloadKind = "raw"
CheckPayloadKindMarkdown CheckPayloadKind = "markdown"
CheckPayloadKindPipeline CheckPayloadKind = "pipeline"
)
var checkPayloadTypes = sortEnum([]CheckPayloadKind{
CheckPayloadKindEmpty,
CheckPayloadKindRaw,
CheckPayloadKindMarkdown,
CheckPayloadKindPipeline,
})

View File

@ -5,15 +5,66 @@
// Status types for CI.
package enum
const (
CIStatusSkipped = "skipped"
CIStatusBlocked = "blocked"
CIStatusDeclined = "declined"
CIStatusWaitingOnDeps = "waiting_on_dependencies"
CIStatusPending = "pending"
CIStatusRunning = "running"
CIStatusSuccess = "success"
CIStatusFailure = "failure"
CIStatusKilled = "killed"
CIStatusError = "error"
import (
"strings"
)
// CIStatus defines the different kinds of CI statuses for
// stages, steps and executions.
type CIStatus string
const (
CIStatusSkipped CIStatus = "skipped"
CIStatusBlocked CIStatus = "blocked"
CIStatusDeclined CIStatus = "declined"
CIStatusWaitingOnDeps CIStatus = "waiting_on_dependencies"
CIStatusPending CIStatus = "pending"
CIStatusRunning CIStatus = "running"
CIStatusSuccess CIStatus = "success"
CIStatusFailure CIStatus = "failure"
CIStatusKilled CIStatus = "killed"
CIStatusError CIStatus = "error"
)
func (status CIStatus) ConvertToCheckStatus() CheckStatus {
if status == CIStatusPending || status == CIStatusWaitingOnDeps {
return CheckStatusPending
}
if status == CIStatusSuccess || status == CIStatusSkipped {
return CheckStatusSuccess
}
if status == CIStatusFailure {
return CheckStatusFailure
}
if status == CIStatusRunning {
return CheckStatusRunning
}
return CheckStatusError
}
// ParseCIStatus converts a status string to the typed enum.
// If there is no exact match, it falls back to the error status
// instead of explicitly returning a not-found error.
func ParseCIStatus(status string) CIStatus {
switch strings.ToLower(status) {
case "skipped", "blocked", "declined", "waiting_on_dependencies", "pending", "running", "success", "failure", "killed", "error":
return CIStatus(strings.ToLower(status))
case "": // just in case status is not passed through
return CIStatusPending
default:
return CIStatusError
}
}
// IsDone returns true if the build has a completed state.
func (status CIStatus) IsDone() bool {
switch status {
case CIStatusWaitingOnDeps,
CIStatusPending,
CIStatusRunning,
CIStatusBlocked:
return false
default:
return true
}
}
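
A brief usage sketch (illustrative only) showing how a raw status string, for example one read from the steps table above, flows through ParseCIStatus and the helper methods:

package main

import (
	"fmt"

	"github.com/harness/gitness/types/enum"
)

func main() {
	status := enum.ParseCIStatus("waiting_on_dependencies")
	fmt.Println(status == enum.CIStatusWaitingOnDeps)                     // true
	fmt.Println(status.IsDone())                                          // false: still in progress
	fmt.Println(status.ConvertToCheckStatus() == enum.CheckStatusPending) // true
	fmt.Println(enum.ParseCIStatus("bogus") == enum.CIStatusError)        // true: unknown strings map to error
}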

View File

@ -1,15 +0,0 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
// Enums for event types delivered to the event stream for the UI
package enum
// EventType defines the kind of event
type EventType string
const (
ExecutionUpdated = "execution_updated"
ExecutionRunning = "execution_running"
ExecutionCompleted = "execution_completed"
)

18
types/enum/sse.go Normal file
View File

@ -0,0 +1,18 @@
// Copyright 2022 Harness Inc. All rights reserved.
// Use of this source code is governed by the Polyform Free Trial License
// that can be found in the LICENSE.md file for this repository.
// Enums for event types delivered to the event stream for the UI
package enum
// SSEType defines the kind of server-sent event
type SSEType string
const (
SSETypeExecutionUpdated = "execution_updated"
SSETypeExecutionRunning = "execution_running"
SSETypeExecutionCompleted = "execution_completed"
SSETypeExecutionCanceled = "execution_canceled"
SSETypeRepositoryImportCompleted = "repository_import_completed"
)
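
These values name the event types carried over the UI's server-sent-events stream. As a generic illustration of the SSE wire format (not the repository's actual SSE writer), an execution event would look like:

package main

import "fmt"

func main() {
	// Hypothetical frame; field names in the data payload are illustrative.
	eventType := "execution_completed"
	data := `{"execution_number": 42}`
	fmt.Printf("event: %s\ndata: %s\n\n", eventType, data)
}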

View File

@ -4,15 +4,18 @@
package types
import "github.com/harness/gitness/types/enum"
// Execution represents an instance of a pipeline execution.
type Execution struct {
ID int64 `json:"-"`
PipelineID int64 `json:"pipeline_id"`
CreatedBy int64 `json:"created_by"`
RepoID int64 `json:"repo_id"`
Trigger string `json:"trigger,omitempty"`
Number int64 `json:"number"`
Parent int64 `json:"parent,omitempty"`
Status string `json:"status"`
Status enum.CIStatus `json:"status"`
Error string `json:"error,omitempty"`
Event string `json:"event,omitempty"`
Action string `json:"action,omitempty"`

View File

@ -8,6 +8,8 @@ type Pipeline struct {
ID int64 `db:"pipeline_id" json:"id"`
Description string `db:"pipeline_description" json:"description"`
UID string `db:"pipeline_uid" json:"uid"`
Disabled bool `db:"pipeline_disabled" json:"disabled"`
CreatedBy int64 `db:"pipeline_created_by" json:"created_by"`
Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline
RepoID int64 `db:"pipeline_repo_id" json:"repo_id"`
DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"`

View File

@ -28,15 +28,13 @@ type Repository struct {
ForkID int64 `db:"repo_fork_id" json:"fork_id"`
PullReqSeq int64 `db:"repo_pullreq_seq" json:"-"`
// TODO: Check if we want to keep those values here
NumForks int `db:"repo_num_forks" json:"num_forks"`
NumPulls int `db:"repo_num_pulls" json:"num_pulls"`
NumClosedPulls int `db:"repo_num_closed_pulls" json:"num_closed_pulls"`
NumOpenPulls int `db:"repo_num_open_pulls" json:"num_open_pulls"`
NumMergedPulls int `db:"repo_num_merged_pulls" json:"num_merged_pulls"`
Importing bool `db:"repo_importing" json:"importing"`
ImportingJobUID *string `db:"repo_importing_job_uid" json:"-"`
Importing bool `db:"repo_importing" json:"importing"`
// git urls
GitURL string `db:"-" json:"git_url"`

View File

@ -8,6 +8,7 @@ type Secret struct {
ID int64 `db:"secret_id" json:"id"`
Description string `db:"secret_description" json:"description"`
SpaceID int64 `db:"secret_space_id" json:"space_id"`
CreatedBy int64 `db:"secret_created_by" json:"created_by"`
UID string `db:"secret_uid" json:"uid"`
Data string `db:"secret_data" json:"-"`
Created int64 `db:"secret_created" json:"created"`

View File

@ -4,6 +4,8 @@
package types
import "github.com/harness/gitness/types/enum"
type Stage struct {
ID int64 `json:"-"`
ExecutionID int64 `json:"execution_id"`
@ -12,7 +14,7 @@ type Stage struct {
Name string `json:"name"`
Kind string `json:"kind,omitempty"`
Type string `json:"type,omitempty"`
Status string `json:"status"`
Status enum.CIStatus `json:"status"`
Error string `json:"error,omitempty"`
ErrIgnore bool `json:"errignore,omitempty"`
ExitCode int `json:"exit_code"`

View File

@ -4,20 +4,22 @@
package types
import "github.com/harness/gitness/types/enum"
type Step struct {
ID int64 `json:"-"`
StageID int64 `json:"-"`
Number int64 `json:"number"`
Name string `json:"name"`
Status string `json:"status"`
Error string `json:"error,omitempty"`
ErrIgnore bool `json:"errignore,omitempty"`
ExitCode int `json:"exit_code"`
Started int64 `json:"started,omitempty"`
Stopped int64 `json:"stopped,omitempty"`
Version int64 `json:"-" db:"step_version"`
DependsOn []string `json:"depends_on,omitempty"`
Image string `json:"image,omitempty"`
Detached bool `json:"detached"`
Schema string `json:"schema,omitempty"`
ID int64 `json:"-"`
StageID int64 `json:"-"`
Number int64 `json:"number"`
Name string `json:"name"`
Status enum.CIStatus `json:"status"`
Error string `json:"error,omitempty"`
ErrIgnore bool `json:"errignore,omitempty"`
ExitCode int `json:"exit_code"`
Started int64 `json:"started,omitempty"`
Stopped int64 `json:"stopped,omitempty"`
Version int64 `json:"-" db:"step_version"`
DependsOn []string `json:"depends_on,omitempty"`
Image string `json:"image,omitempty"`
Detached bool `json:"detached"`
Schema string `json:"schema,omitempty"`
}

View File

@ -9,11 +9,12 @@ import "github.com/harness/gitness/types/enum"
type Trigger struct {
ID int64 `json:"id"`
Description string `json:"description"`
Type string `json:"trigger_type"`
PipelineID int64 `json:"pipeline_id"`
Secret string `json:"-"`
RepoID int64 `json:"repo_id"`
CreatedBy int64 `json:"created_by"`
Enabled bool `json:"enabled"`
Disabled bool `json:"disabled"`
Actions []enum.TriggerAction `json:"actions"`
UID string `json:"uid"`
Created int64 `json:"created"`

View File

@ -101,10 +101,6 @@ rules:
- lodash.*
paths:
- lodash
- name: yaml
importNames:
- stringify
message: 'Please use yamlStringify from @common/utils/YamlHelperMethods instead of this'
overrides:
- files:

View File

@ -47,6 +47,7 @@ export interface CODERoutes {
toCODESpaceSettings: (args: Required<Pick<CODEProps, 'space'>>) => string
toCODEPipelines: (args: Required<Pick<CODEProps, 'repoPath'>>) => string
toCODEPipelineEdit: (args: Required<Pick<CODEProps, 'repoPath' | 'pipeline'>>) => string
toCODEPipelineSettings: (args: Required<Pick<CODEProps, 'repoPath' | 'pipeline'>>) => string
toCODESecrets: (args: Required<Pick<CODEProps, 'space'>>) => string
toCODEGlobalSettings: () => string
@ -98,6 +99,7 @@ export const routes: CODERoutes = {
toCODESpaceSettings: ({ space }) => `/settings/${space}`,
toCODEPipelines: ({ repoPath }) => `/${repoPath}/pipelines`,
toCODEPipelineEdit: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/edit`,
toCODEPipelineSettings: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/triggers`,
toCODESecrets: ({ space }) => `/secrets/${space}`,
toCODEGlobalSettings: () => '/settings',

View File

@ -33,6 +33,7 @@ import Secret from 'pages/Secret/Secret'
import Search from 'pages/Search/Search'
import AddUpdatePipeline from 'pages/AddUpdatePipeline/AddUpdatePipeline'
import { useAppContext } from 'AppContext'
import PipelineSettings from 'components/PipelineSettings/PipelineSettings'
export const RouteDestinations: React.FC = React.memo(function RouteDestinations() {
const { getString } = useStrings()
@ -194,6 +195,14 @@ export const RouteDestinations: React.FC = React.memo(function RouteDestinations
</Route>
)}
{standalone && (
<Route path={routes.toCODEPipelineSettings({ repoPath, pipeline: pathProps.pipeline })} exact>
<LayoutWithSideNav title={getString('pageTitle.pipelines')}>
<PipelineSettings />
</LayoutWithSideNav>
</Route>
)}
{standalone && (
<Route path={routes.toCODEPipelines({ repoPath })} exact>
<LayoutWithSideNav title={getString('pageTitle.pipelines')}>

View File

@ -36,7 +36,7 @@ const CloneCredentialDialog = (props: CloneCredentialDialogProps) => {
},
[mutate, showError]
)
const tokenData = standalone ? false : hooks?.useGenerateToken?.(hash, currentUser.uid, flag)
const tokenData = standalone ? false : hooks?.useGenerateToken?.(hash, currentUser?.uid, flag)
useEffect(() => {
if (tokenData) {

View File

@ -47,7 +47,6 @@ export const Editor = React.memo(function CodeMirrorReactEditor({
onViewUpdate,
darkTheme
}: EditorProps) {
const contentRef = useRef(content)
const view = useRef<EditorView>()
const ref = useRef<HTMLDivElement>()
const languageConfig = useMemo(() => new Compartment(), [])
@ -139,14 +138,5 @@ export const Editor = React.memo(function CodeMirrorReactEditor({
}
}, [filename, forMarkdown, view, languageConfig, markdownLanguageSupport])
useEffect(() => {
if (contentRef.current !== content) {
contentRef.current = content
viewRef?.current?.dispatch({
changes: { from: 0, to: viewRef?.current?.state.doc.length, insert: content }
})
}
}, [content, viewRef])
return <Container ref={ref} className={cx(css.editor, className)} style={style} />
})

View File

@ -10,7 +10,7 @@ import { useGetSpaceParam } from 'hooks/useGetSpaceParam'
import type { CODEProps } from 'RouteDefinitions'
import type { GitInfoProps } from 'utils/GitUtils'
import { ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus'
import { getStatus } from 'utils/PipelineUtils'
import { getStatus } from 'utils/ExecutionUtils'
import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator'
import { timeDistance } from 'utils/Utils'
import css from './ExecutionPageHeader.module.scss'

View File

@ -3,7 +3,7 @@ import { Container, FlexExpander, Layout, Text } from '@harnessio/uicore'
import cx from 'classnames'
import type { TypesStage } from 'services/code'
import { ExecutionState, ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus'
import { getStatus } from 'utils/PipelineUtils'
import { getStatus } from 'utils/ExecutionUtils'
import { timeDistance } from 'utils/Utils'
import css from './ExecutionStageList.module.scss'

View File

@ -0,0 +1,189 @@
import React, { useState } from 'react'
import { Intent } from '@blueprintjs/core'
import * as yup from 'yup'
import { Button, Container, Layout, FlexExpander, Formik, FormikForm, FormInput, Text } from '@harnessio/uicore'
import { Icon } from '@harnessio/icons'
import { FontVariation } from '@harnessio/design-system'
import { useStrings } from 'framework/strings'
import { REGEX_VALID_REPO_NAME } from 'utils/Utils'
import { ImportFormData, RepoVisibility, parseUrl } from 'utils/GitUtils'
import css from '../NewRepoModalButton.module.scss'
interface ImportFormProps {
handleSubmit: (data: ImportFormData) => void
loading: boolean
// eslint-disable-next-line @typescript-eslint/no-explicit-any
hideModal: any
}
const ImportForm = (props: ImportFormProps) => {
const { handleSubmit, loading, hideModal } = props
const { getString } = useStrings()
const [auth, setAuth] = useState(false)
// eslint-disable-next-line no-control-regex
const MATCH_REPOURL_REGEX = /^(https?:\/\/(?:www\.)?(github|gitlab)\.com\/([^/]+\/[^/]+))/
const formInitialValues: ImportFormData = {
repoUrl: '',
username: '',
password: '',
name: '',
description: '',
isPublic: RepoVisibility.PRIVATE
}
return (
<Formik
initialValues={formInitialValues}
formName="importRepoForm"
validationSchema={yup.object().shape({
repoUrl: yup
.string()
.matches(MATCH_REPOURL_REGEX, getString('importSpace.invalidUrl'))
.required(getString('importRepo.required')),
name: yup
.string()
.trim()
.required(getString('validation.nameIsRequired'))
.matches(REGEX_VALID_REPO_NAME, getString('validation.repoNamePatternIsNotValid'))
})}
onSubmit={handleSubmit}>
{formik => {
return (
<FormikForm>
<FormInput.Text
name="repoUrl"
label={getString('importRepo.url')}
placeholder={getString('importRepo.urlPlaceholder')}
tooltipProps={{
dataTooltipId: 'repositoryURLTextField'
}}
onChange={event => {
const target = event.target as HTMLInputElement
formik.setFieldValue('repoUrl', target.value)
if (target.value) {
const provider = parseUrl(target.value)
if (provider?.fullRepo) {
formik.setFieldValue('name', provider.repoName ? provider.repoName : provider?.fullRepo)
formik.validateField('repoUrl')
}
}
}}
/>
<FormInput.CheckBox
name="authorization"
label={getString('importRepo.reqAuth')}
tooltipProps={{
dataTooltipId: 'authorization'
}}
onClick={() => {
setAuth(!auth)
}}
/>
{auth ? (
<>
<FormInput.Text
name="username"
label={getString('userName')}
placeholder={getString('importRepo.userPlaceholder')}
tooltipProps={{
dataTooltipId: 'repositoryUserTextField'
}}
/>
<FormInput.Text
inputGroup={{ type: 'password' }}
name="password"
label={getString('importRepo.passToken')}
placeholder={getString('importRepo.passwordPlaceholder')}
tooltipProps={{
dataTooltipId: 'repositoryPasswordTextField'
}}
/>
</>
) : null}
<hr className={css.dividerContainer} />
<FormInput.Text
name="name"
label={getString('name')}
placeholder={getString('enterRepoName')}
tooltipProps={{
dataTooltipId: 'repositoryNameTextField'
}}
onChange={() => {
formik.validateField('repoUrl')
}}
/>
<FormInput.Text
name="description"
label={getString('description')}
placeholder={getString('enterDescription')}
tooltipProps={{
dataTooltipId: 'repositoryDescriptionTextField'
}}
/>
<hr className={css.dividerContainer} />
<Container>
<FormInput.RadioGroup
name="isPublic"
label=""
items={[
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container>
<Layout.Vertical spacing="xsmall">
<Text>{getString('public')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.publicLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PUBLIC
},
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container margin={{ left: 'small' }}>
<Layout.Vertical spacing="xsmall">
<Text>{getString('private')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.privateLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PRIVATE
}
]}
/>
</Container>
<Layout.Horizontal
spacing="small"
padding={{ right: 'xxlarge', top: 'xlarge', bottom: 'large' }}
style={{ alignItems: 'center' }}>
<Button type="submit" text={getString('importRepo.title')} intent={Intent.PRIMARY} disabled={loading} />
<Button text={getString('cancel')} minimal onClick={hideModal} />
<FlexExpander />
{loading && <Icon intent={Intent.PRIMARY} name="steps-spinner" size={16} />}
</Layout.Horizontal>
</FormikForm>
)
}}
</Formik>
)
}
export default ImportForm

View File

@ -1,3 +1,29 @@
.divider {
margin: var(--spacing-medium) 0 var(--spacing-large) 0 !important;
}
.dividerContainer {
opacity: 0.2;
height: 1px;
color: var(--grey-100);
margin: 20px 0;
}
.popover {
transform: translateY(5px) !important;
.menuItem {
strong {
display: inline-block;
margin-left: 10px;
}
p {
font-size: 12px;
padding: 0 var(--spacing-xlarge);
line-height: 16px;
margin: 5px 0;
max-width: 320px;
}
}
}

View File

@ -1,3 +1,6 @@
/* eslint-disable */
// This is an auto-generated file
export declare const divider: string
export declare const dividerContainer: string
export declare const menuItem: string
export declare const popover: string

View File

@ -1,5 +1,14 @@
import React, { useEffect, useMemo, useState } from 'react'
import { Icon as BPIcon, Classes, Dialog, Intent, Menu, MenuDivider, MenuItem } from '@blueprintjs/core'
import {
Icon as BPIcon,
Classes,
Dialog,
Intent,
Menu,
MenuDivider,
MenuItem,
PopoverPosition
} from '@blueprintjs/core'
import * as yup from 'yup'
import {
Button,
@ -15,7 +24,8 @@ import {
Text,
ButtonVariation,
ButtonSize,
TextInput
TextInput,
SplitButton
} from '@harnessio/uicore'
import { Icon } from '@harnessio/icons'
import { FontVariation } from '@harnessio/design-system'
@ -30,26 +40,19 @@ import {
REGEX_VALID_REPO_NAME,
SUGGESTED_BRANCH_NAMES
} from 'utils/Utils'
import { isGitBranchNameValid } from 'utils/GitUtils'
import {
ImportFormData,
RepoCreationType,
RepoFormData,
RepoVisibility,
isGitBranchNameValid,
parseUrl
} from 'utils/GitUtils'
import type { TypesRepository, OpenapiCreateRepositoryRequest } from 'services/code'
import { useAppContext } from 'AppContext'
import ImportForm from './ImportForm/ImportForm'
import css from './NewRepoModalButton.module.scss'
enum RepoVisibility {
PUBLIC = 'public',
PRIVATE = 'private'
}
interface RepoFormData {
name: string
description: string
license: string
defaultBranch: string
gitignore: string
addReadme: boolean
isPublic: RepoVisibility
}
const formInitialValues: RepoFormData = {
name: '',
description: '',
@ -90,6 +93,15 @@ export const NewRepoModalButton: React.FC<NewRepoModalButtonProps> = ({
space_path: space
}
})
const { mutate: importRepo, loading: importRepoLoading } = useMutate<TypesRepository>({
verb: 'POST',
path: `/api/v1/repos/import`,
queryParams: standalone
? undefined
: {
space_path: space
}
})
const {
data: gitignores,
loading: gitIgnoreLoading,
@ -100,14 +112,13 @@ export const NewRepoModalButton: React.FC<NewRepoModalButtonProps> = ({
loading: licenseLoading,
error: licenseError
} = useGet({ path: '/api/v1/resources/license' })
const loading = submitLoading || gitIgnoreLoading || licenseLoading
const loading = submitLoading || gitIgnoreLoading || licenseLoading || importRepoLoading
useEffect(() => {
if (gitIgnoreError || licenseError) {
showError(getErrorMessage(gitIgnoreError || licenseError), 0)
}
}, [gitIgnoreError, licenseError, showError])
const handleSubmit = (formData: RepoFormData) => {
try {
const payload: OpenapiCreateRepositoryRequest = {
@ -132,7 +143,24 @@ export const NewRepoModalButton: React.FC<NewRepoModalButtonProps> = ({
showError(getErrorMessage(exception), 0, getString('failedToCreateRepo'))
}
}
const handleImportSubmit = (formData: ImportFormData) => {
const provider = parseUrl(formData.repoUrl)
const importPayload = {
description: formData.description || '',
parent_ref: space,
uid: formData.name,
provider: { type: provider?.provider.toLowerCase(), username: formData.username, password: formData.password },
provider_repo: provider?.fullRepo
}
importRepo(importPayload)
.then(response => {
hideModal()
onSubmit(response)
})
.catch(_error => {
showError(getErrorMessage(_error), 0, getString('importRepo.failedToImportRepo'))
})
}
return (
<Dialog
isOpen
@ -145,147 +173,166 @@ export const NewRepoModalButton: React.FC<NewRepoModalButtonProps> = ({
style={{ height: '100%' }}
data-testid="add-target-to-flag-modal">
<Heading level={3} font={{ variation: FontVariation.H3 }} margin={{ bottom: 'xlarge' }}>
{modalTitle}
{repoOption.type === RepoCreationType.IMPORT ? getString('importRepo.title') : modalTitle}
</Heading>
<Container margin={{ right: 'xxlarge' }}>
<Formik
initialValues={formInitialValues}
formName="editVariations"
enableReinitialize={true}
validationSchema={yup.object().shape({
name: yup
.string()
.trim()
.required()
.matches(REGEX_VALID_REPO_NAME, getString('validation.repoNamePatternIsNotValid'))
})}
validateOnChange
validateOnBlur
onSubmit={handleSubmit}>
<FormikForm>
<FormInput.Text
name="name"
label={getString('name')}
placeholder={getString('enterRepoName')}
tooltipProps={{
dataTooltipId: 'repositoryNameTextField'
}}
inputGroup={{ autoFocus: true }}
/>
<FormInput.Text
name="description"
label={getString('description')}
placeholder={getString('enterDescription')}
tooltipProps={{
dataTooltipId: 'repositoryDescriptionTextField'
}}
/>
<Container margin={{ top: 'medium', bottom: 'medium' }}>
<Text>
{getString('createRepoModal.branchLabel')}
<strong>
<Button
text={branchName}
icon="git-new-branch"
rightIcon="chevron-down"
variation={ButtonVariation.TERTIARY}
size={ButtonSize.SMALL}
iconProps={{ size: 14 }}
tooltip={<BranchName currentBranchName={branchName} onSelect={name => setBranchName(name)} />}
tooltipProps={{ interactionKind: 'click' }}
/>
</strong>
{getString('createRepoModal.branch')}
</Text>
</Container>
<Container>
<FormInput.RadioGroup
name="isPublic"
label=""
items={[
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container>
<Layout.Vertical spacing="xsmall">
<Text>{getString('public')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.publicLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PUBLIC
},
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container margin={{ left: 'small' }}>
<Layout.Vertical spacing="xsmall">
<Text>{getString('private')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.privateLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PRIVATE
}
]}
{repoOption.type === RepoCreationType.IMPORT ? (
<ImportForm hideModal={hideModal} handleSubmit={handleImportSubmit} loading={false} />
) : (
<Formik
initialValues={formInitialValues}
formName="editVariations"
enableReinitialize={true}
validationSchema={yup.object().shape({
name: yup
.string()
.trim()
.required()
.matches(REGEX_VALID_REPO_NAME, getString('validation.repoNamePatternIsNotValid'))
})}
validateOnChange
validateOnBlur
onSubmit={handleSubmit}>
<FormikForm>
<FormInput.Text
name="name"
label={getString('name')}
placeholder={getString('enterRepoName')}
tooltipProps={{
dataTooltipId: 'repositoryNameTextField'
}}
inputGroup={{ autoFocus: true }}
/>
</Container>
<FormInput.Text
name="description"
label={getString('description')}
placeholder={getString('enterDescription')}
tooltipProps={{
dataTooltipId: 'repositoryDescriptionTextField'
}}
/>
<Container margin={{ top: 'medium', bottom: 'medium' }}>
<Text>
{getString('createRepoModal.branchLabel')}
<strong>
<Button
text={branchName}
icon="git-new-branch"
rightIcon="chevron-down"
variation={ButtonVariation.TERTIARY}
size={ButtonSize.SMALL}
iconProps={{ size: 14 }}
tooltip={<BranchName currentBranchName={branchName} onSelect={name => setBranchName(name)} />}
tooltipProps={{ interactionKind: 'click' }}
/>
</strong>
{getString('createRepoModal.branch')}
</Text>
</Container>
<Container>
<FormInput.RadioGroup
name="isPublic"
label=""
items={[
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container>
<Layout.Vertical spacing="xsmall">
<Text>{getString('public')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.publicLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PUBLIC
},
{
label: (
<Container>
<Layout.Horizontal>
<Icon name="git-clone-step" size={20} margin={{ right: 'medium' }} />
<Container margin={{ left: 'small' }}>
<Layout.Vertical spacing="xsmall">
<Text>{getString('private')}</Text>
<Text font={{ variation: FontVariation.TINY }}>
{getString('createRepoModal.privateLabel')}
</Text>
</Layout.Vertical>
</Container>
</Layout.Horizontal>
</Container>
),
value: RepoVisibility.PRIVATE
}
]}
/>
</Container>
<FormInput.Select
name="license"
label={getString('addLicense')}
placeholder={getString('none')}
items={licences || []}
usePortal
/>
<FormInput.Select
name="license"
label={getString('addLicense')}
placeholder={getString('none')}
items={licences || []}
usePortal
/>
<FormInput.Select
name="gitignore"
label={getString('addGitIgnore')}
placeholder={getString('none')}
items={(gitignores || []).map((entry: string) => ({ label: entry, value: entry }))}
usePortal
/>
<FormInput.Select
name="gitignore"
label={getString('addGitIgnore')}
placeholder={getString('none')}
items={(gitignores || []).map((entry: string) => ({ label: entry, value: entry }))}
usePortal
/>
<FormInput.CheckBox
name="addReadme"
label={getString('addReadMe')}
tooltipProps={{
dataTooltipId: 'addReadMe'
}}
/>
<Layout.Horizontal
spacing="small"
padding={{ right: 'xxlarge', top: 'xxxlarge', bottom: 'large' }}
style={{ alignItems: 'center' }}>
<Button type="submit" text={getString('createRepo')} intent={Intent.PRIMARY} disabled={loading} />
<Button text={cancelButtonTitle || getString('cancel')} minimal onClick={hideModal} />
<FlexExpander />
<FormInput.CheckBox
name="addReadme"
label={getString('addReadMe')}
tooltipProps={{
dataTooltipId: 'addReadMe'
}}
/>
<Layout.Horizontal
spacing="small"
padding={{ right: 'xxlarge', top: 'xxxlarge', bottom: 'large' }}
style={{ alignItems: 'center' }}>
<Button type="submit" text={getString('createRepo')} intent={Intent.PRIMARY} disabled={loading} />
<Button text={cancelButtonTitle || getString('cancel')} minimal onClick={hideModal} />
<FlexExpander />
{loading && <Icon intent={Intent.PRIMARY} name="steps-spinner" size={16} />}
</Layout.Horizontal>
</FormikForm>
</Formik>
{loading && <Icon intent={Intent.PRIMARY} name="steps-spinner" size={16} />}
</Layout.Horizontal>
</FormikForm>
</Formik>
)}
</Container>
</Layout.Vertical>
</Dialog>
)
}
const { getString } = useStrings()
const [openModal, hideModal] = useModalHook(ModalComponent, [onSubmit])
const repoCreateOptions: RepoCreationOption[] = [
{
type: RepoCreationType.CREATE,
title: getString('newRepo'),
desc: getString('createNewRepo')
},
{
type: RepoCreationType.IMPORT,
title: getString('importGitRepo'),
desc: getString('importGitRepo')
}
]
const [repoOption, setRepoOption] = useState<RepoCreationOption>(repoCreateOptions[0])
const [openModal, hideModal] = useModalHook(ModalComponent, [onSubmit, repoOption])
const { standalone } = useAppContext()
const { hooks } = useAppContext()
const permResult = hooks?.usePermissionTranslate?.(
@ -297,8 +344,48 @@ export const NewRepoModalButton: React.FC<NewRepoModalButtonProps> = ({
},
[space]
)
return (
<SplitButton
{...props}
loading={false}
text={repoOption.title}
variation={ButtonVariation.PRIMARY}
popoverProps={{
interactionKind: 'click',
usePortal: true,
popoverClassName: css.popover,
position: PopoverPosition.BOTTOM_RIGHT,
transitionDuration: 1000
}}
icon={repoOption.type === RepoCreationType.IMPORT ? undefined : 'plus'}
{...permissionProps(permResult, standalone)}
onClick={() => {
openModal()
}}>
{repoCreateOptions.map(option => {
return (
<Menu.Item
key={option.type}
className={css.menuItem}
text={
<>
<p>{option.desc}</p>
</>
}
onClick={() => {
setRepoOption(option)
}}
/>
)
})}
</SplitButton>
)
}
return <Button onClick={openModal} {...props} {...permissionProps(permResult, standalone)} />
interface RepoCreationOption {
type: RepoCreationType
title: string
desc: string
}
interface BranchNameProps {

View File

@ -21,7 +21,7 @@ import { useStrings } from 'framework/strings'
import type { OpenapiCreateSecretRequest, TypesSecret } from 'services/code'
import { getErrorMessage } from 'utils/Utils'
interface SecretFormData {
export interface SecretFormData {
value: string
description: string
name: string
@ -82,10 +82,7 @@ export const NewSecretModalButton: React.FC<NewSecretModalButtonProps> = ({
onClose={hideModal}
title={''}
style={{ width: 700, maxHeight: '95vh', overflow: 'auto' }}>
<Layout.Vertical
padding={{ left: 'xxlarge' }}
style={{ height: '100%' }}
data-testid="add-target-to-flag-modal">
<Layout.Vertical padding={{ left: 'xxlarge' }} style={{ height: '100%' }} data-testid="add-secret-modal">
<Heading level={3} font={{ variation: FontVariation.H3 }} margin={{ bottom: 'xlarge' }}>
{modalTitle}
</Heading>

View File

@ -0,0 +1,316 @@
import React, { useState } from 'react'
import { Intent } from '@blueprintjs/core'
import * as yup from 'yup'
import { Color } from '@harnessio/design-system'
import { Button, Container, Label, Layout, FlexExpander, Formik, FormikForm, FormInput, Text } from '@harnessio/uicore'
import { Icon } from '@harnessio/icons'
import { useStrings } from 'framework/strings'
import { Organization, type ImportSpaceFormData } from 'utils/GitUtils'
import css from '../NewSpaceModalButton.module.scss'
interface ImportFormProps {
handleSubmit: (data: ImportSpaceFormData) => void
loading: boolean
// eslint-disable-next-line @typescript-eslint/no-explicit-any
hideModal: any
}
const ImportSpaceForm = (props: ImportFormProps) => {
const { handleSubmit, loading, hideModal } = props
const { getString } = useStrings()
const [auth, setAuth] = useState(false)
const [step, setStep] = useState(0)
const [buttonLoading, setButtonLoading] = useState(false)
const formInitialValues: ImportSpaceFormData = {
gitProvider: '',
username: '',
password: '',
name: '',
description: '',
organization: ''
}
const providers = [
{ value: 'Github', label: 'Github' },
{ value: 'Gitlab', label: 'Gitlab' }
]
const validationSchemaStepOne = yup.object().shape({
gitProvider: yup.string().trim().required(getString('importSpace.providerRequired'))
})
const validationSchemaStepTwo = yup.object().shape({
organization: yup.string().trim().required(getString('importSpace.orgRequired')),
name: yup.string().trim().required(getString('importSpace.spaceNameRequired'))
})
return (
<Formik
initialValues={formInitialValues}
formName="importSpaceForm"
enableReinitialize={true}
validateOnBlur
onSubmit={handleSubmit}>
{formik => {
const { values } = formik
const handleValidationClick = async () => {
try {
if (step === 0) {
await validationSchemaStepOne.validate(formik.values, { abortEarly: false })
setStep(1)
} else if (step === 1) {
await validationSchemaStepTwo.validate(formik.values, { abortEarly: false })
setButtonLoading(true)
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (err: any) {
formik.setErrors(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
err.inner.reduce((acc: { [x: string]: any }, current: { path: string | number; message: string }) => {
acc[current.path] = current.message
return acc
}, {})
)
}
}
const handleImport = async () => {
await handleSubmit(formik.values)
setButtonLoading(false)
}
return (
<Container width={'97%'}>
<FormikForm>
{step === 0 ? (
<>
<Container width={'70%'}>
<Layout.Horizontal>
<Icon className={css.icon} name="code-info" size={16} />
<Text padding={{ left: 'small' }} font={{ size: 'small' }}>
{getString('importSpace.content')}
</Text>
</Layout.Horizontal>
</Container>
<hr className={css.dividerContainer} />
<Container className={css.textContainer} width={'70%'}>
<FormInput.Select
value={{ value: values.gitProvider, label: values.gitProvider } || providers[0]}
name={'gitProvider'}
label={getString('importSpace.gitProvider')}
items={providers}
className={css.selectBox}
/>
{formik.errors.gitProvider ? (
<Text
margin={{ top: 'small', bottom: 'small' }}
color={Color.RED_500}
icon="circle-cross"
iconProps={{ color: Color.RED_500 }}>
{formik.errors.gitProvider}
</Text>
) : null}
<Layout.Horizontal flex>
{getString('importSpace.authorization')}
<Container padding={{ left: 'small' }} width={'100%'}>
<hr className={css.dividerContainer} />
</Container>
</Layout.Horizontal>
<FormInput.Text
name="username"
label={getString('userName')}
placeholder={getString('importRepo.userPlaceholder')}
tooltipProps={{
dataTooltipId: 'spaceUserTextField'
}}
/>
{formik.errors.username ? (
<Text
margin={{ top: 'small', bottom: 'small' }}
color={Color.RED_500}
icon="circle-cross"
iconProps={{ color: Color.RED_500 }}>
{formik.errors.username}
</Text>
) : null}
<FormInput.Text
name="password"
label={getString('importRepo.passToken')}
placeholder={getString('importRepo.passwordPlaceholder')}
tooltipProps={{
dataTooltipId: 'spacePasswordTextField'
}}
inputGroup={{ type: 'password' }}
/>
{formik.errors.password ? (
<Text
margin={{ top: 'small', bottom: 'small' }}
color={Color.RED_500}
icon="circle-cross"
iconProps={{ color: Color.RED_500 }}>
{formik.errors.password}
</Text>
) : null}
</Container>
</>
) : null}
{step === 1 ? (
<>
<Layout.Horizontal flex>
{/* <Container className={css.detailsLabel}> */}
<Text className={css.detailsLabel} font={{ size: 'small' }} flex>
{getString('importSpace.details')}
</Text>
{/* </Container> */}
<Container padding={{ left: 'small' }} width={'100%'}>
<hr className={css.dividerContainer} />
</Container>
</Layout.Horizontal>
<Container className={css.textContainer} width={'70%'}>
<FormInput.Text
name="organization"
label={
formik.values.gitProvider === Organization.GITHUB
? getString('importSpace.githubOrg')
: getString('importSpace.gitlabGroup')
}
placeholder={getString('importSpace.orgNamePlaceholder')}
tooltipProps={{
dataTooltipId: 'importSpaceOrgName'
}}
/>
{formik.errors.organization ? (
<Text
margin={{ bottom: 'small' }}
color={Color.RED_500}
icon="circle-cross"
iconProps={{ color: Color.RED_500 }}>
{formik.errors.organization}
</Text>
) : null}
<Layout.Horizontal>
<Label>{getString('importSpace.importLabel')}</Label>
<Icon padding={{ left: 'small' }} className={css.icon} name="code-info" size={16} />
</Layout.Horizontal>
<Container className={css.importContainer} padding={'medium'}>
<Layout.Horizontal>
<FormInput.CheckBox
name="repositories"
label={getString('pageTitle.repositories')}
tooltipProps={{
dataTooltipId: 'authorization'
}}
defaultChecked
onClick={() => {
setAuth(!auth)
}}
disabled
padding={{ right: 'small' }}
className={css.checkbox}
/>
<Container padding={{ left: 'xxxlarge' }}>
<FormInput.CheckBox
name="pipelines"
label={getString('pageTitle.pipelines')}
tooltipProps={{
dataTooltipId: 'pipelines'
}}
onClick={() => {
setAuth(!auth)
}}
/>
</Container>
</Layout.Horizontal>
</Container>
<Container>
<hr className={css.dividerContainer} />
<FormInput.Text
name="name"
label={getString('importSpace.spaceName')}
placeholder={getString('enterName')}
tooltipProps={{
dataTooltipId: 'importSpaceName'
}}
/>
{formik.errors.name ? (
<Text
margin={{ bottom: 'small' }}
color={Color.RED_500}
icon="circle-cross"
iconProps={{ color: Color.RED_500 }}>
{formik.errors.name}
</Text>
) : null}
<FormInput.Text
name="description"
label={getString('importSpace.description')}
placeholder={getString('importSpace.descPlaceholder')}
tooltipProps={{
dataTooltipId: 'importSpaceDesc'
}}
/>
</Container>
</Container>
</>
) : null}
<hr className={css.dividerContainer} />
<Layout.Horizontal
spacing="small"
padding={{ right: 'xxlarge', bottom: 'large' }}
style={{ alignItems: 'center' }}>
{step === 1 ? (
<Button
disabled={buttonLoading}
text={
buttonLoading ? (
<>
<Container className={css.loadingIcon} width={93.5} flex={{ alignItems: 'center' }}>
<Icon className={css.loadingIcon} name="steps-spinner" size={16} />
</Container>
</>
) : (
getString('importSpace.title')
)
}
intent={Intent.PRIMARY}
onClick={() => {
handleValidationClick()
if (formik.values.name !== '' || formik.values.organization !== '') {
handleImport()
setButtonLoading(false)
}
formik.setErrors({})
}}
/>
) : (
<Button
text={getString('importSpace.next')}
intent={Intent.PRIMARY}
onClick={() => {
handleValidationClick()
if (
(!formik.errors.gitProvider && formik.touched.gitProvider) ||
(!formik.errors.username && formik.touched.username) ||
(!formik.errors.password && formik.touched.password)
) {
formik.setErrors({})
setStep(1)
}
}}
/>
)}
<Button text={getString('cancel')} minimal onClick={hideModal} />
<FlexExpander />
{loading && <Icon intent={Intent.PRIMARY} name="steps-spinner" size={16} />}
</Layout.Horizontal>
</FormikForm>
</Container>
)
}}
</Formik>
)
}
export default ImportSpaceForm

View File

@ -1,3 +1,127 @@
.divider {
margin: var(--spacing-medium) 0 var(--spacing-large) 0 !important;
}
.popoverSplit {
:global {
.bp3-menu {
min-width: 248px;
max-width: 248px;
}
.bp3-menu-item {
min-width: 240px;
max-width: 240px;
}
.menuItem {
max-width: fit-content;
}
}
}
.test {
padding: 20px;
}
.dividerContainer {
opacity: 0.2;
height: 1px;
color: var(--grey-100);
margin: 20px 0;
}
.halfDividerContainer {
opacity: 0.2;
height: 1px;
color: var(--grey-100);
margin-bottom: 20px;
}
.textContainer {
font-size: var(--font-size-small) !important;
--form-input-font-size: var(--font-size-small) !important;
.selectBox {
margin-bottom: unset !important;
}
}
.menuItem {
max-width: fit-content;
}
.importContainer {
background: var(--grey-50) !important;
border: 1px solid var(--grey-200) !important;
border-radius: 4px;
:global {
.bp3-form-group {
margin: unset !important;
}
}
}
.loadingIcon {
fill: var(--grey-0) !important;
color: var(--grey-0) !important;
:global {
.bp3-icon {
padding-left: 45px !important;
fill: var(--grey-0) !important;
color: var(--grey-0) !important;
}
}
}
.detailsLabel {
white-space: nowrap !important;
max-width: 155px !important;
color: var(--grey-600) !important;
}
.icon {
> svg {
fill: var(--primary-7) !important;
> path {
fill: var(--primary-7) !important;
}
}
}
.checkbox {
:global {
.Tooltip--acenter {
opacity: 0.7 !important;
}
.bp3-control-indicator {
background: var(--primary-7) !important;
opacity: 0.7;
}
}
}
.popoverSpace {
position: relative;
left: 33px;
:global {
.bp3-menu {
min-width: 162px;
max-width: 162px;
}
.bp3-menu-item {
min-width: 153px;
max-width: 153px;
}
.menuItem {
max-width: fit-content;
}
}
}

Some files were not shown because too many files have changed in this diff.