diff --git a/README.md b/README.md index cb76c67fc..7fcdea250 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,6 @@ $ ./gitness register > NOTE: A user `admin` (pw: `changeit`) gets created by default. - Login to the application: ```bash diff --git a/cli/server/server.go b/cli/server/server.go index 7f95e8f0d..779a5b5bb 100644 --- a/cli/server/server.go +++ b/cli/server/server.go @@ -73,6 +73,10 @@ func (c *command) run(*kingpin.ParseContext) error { // - ctx is canceled g, gCtx := errgroup.WithContext(ctx) + g.Go(func() error { + return system.services.JobScheduler.Run(gCtx) + }) + // start server gHTTP, shutdownHTTP := system.server.ListenAndServe() g.Go(gHTTP.Wait) @@ -116,6 +120,8 @@ func (c *command) run(*kingpin.ParseContext) error { } } + system.services.JobScheduler.WaitJobsDone(shutdownCtx) + log.Info().Msg("wait for subroutines to complete") err = g.Wait() diff --git a/cmd/gitness/wire.go b/cmd/gitness/wire.go index 8d4b0db68..54261520e 100644 --- a/cmd/gitness/wire.go +++ b/cmd/gitness/wire.go @@ -44,6 +44,7 @@ import ( "github.com/harness/gitness/internal/server" "github.com/harness/gitness/internal/services" "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/services/job" pullreqservice "github.com/harness/gitness/internal/services/pullreq" "github.com/harness/gitness/internal/services/webhook" "github.com/harness/gitness/internal/store" @@ -102,6 +103,7 @@ func initSystem(ctx context.Context, config *types.Config) (*cliserver.System, e lock.WireSet, pubsub.WireSet, codecomments.WireSet, + job.WireSet, gitrpccron.WireSet, checkcontroller.WireSet, execution.WireSet, diff --git a/cmd/gitness/wire_gen.go b/cmd/gitness/wire_gen.go index 2ae30f97e..70f0e0692 100644 --- a/cmd/gitness/wire_gen.go +++ b/cmd/gitness/wire_gen.go @@ -42,6 +42,7 @@ import ( server2 "github.com/harness/gitness/internal/server" "github.com/harness/gitness/internal/services" "github.com/harness/gitness/internal/services/codecomments" 
+ "github.com/harness/gitness/internal/services/job" pullreq2 "github.com/harness/gitness/internal/services/pullreq" "github.com/harness/gitness/internal/services/webhook" "github.com/harness/gitness/internal/store" @@ -87,6 +88,7 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro } pathUID := check.ProvidePathUIDCheck() repoStore := database.ProvideRepoStore(db, pathCache) + pipelineStore := database.ProvidePipelineStore(db) gitrpcConfig, err := server.ProvideGitRPCClientConfig() if err != nil { return nil, err @@ -95,27 +97,26 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro if err != nil { return nil, err } - repoController := repo.ProvideController(config, db, provider, pathUID, authorizer, pathStore, repoStore, spaceStore, principalStore, gitrpcInterface) + repoController := repo.ProvideController(config, db, provider, pathUID, authorizer, pathStore, repoStore, spaceStore, pipelineStore, principalStore, gitrpcInterface) executionStore := database.ProvideExecutionStore(db) stageStore := database.ProvideStageStore(db) - pipelineStore := database.ProvidePipelineStore(db) - executionController := execution.ProvideController(db, authorizer, executionStore, stageStore, pipelineStore, spaceStore) + executionController := execution.ProvideController(db, authorizer, executionStore, repoStore, stageStore, pipelineStore) stepStore := database.ProvideStepStore(db) logStore := logs.ProvideLogStore(db, config) logStream := livelog.ProvideLogStream(config) - logsController := logs2.ProvideController(db, authorizer, executionStore, pipelineStore, stageStore, stepStore, logStore, logStream, spaceStore) + logsController := logs2.ProvideController(db, authorizer, executionStore, repoStore, pipelineStore, stageStore, stepStore, logStore, logStream) secretStore := database.ProvideSecretStore(db) connectorStore := database.ProvideConnectorStore(db) templateStore := database.ProvideTemplateStore(db) spaceController 
:= space.ProvideController(db, provider, pathUID, authorizer, pathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore) - pipelineController := pipeline.ProvideController(db, pathUID, pathStore, repoStore, authorizer, pipelineStore, spaceStore) + pipelineController := pipeline.ProvideController(db, pathUID, pathStore, repoStore, authorizer, pipelineStore) encrypter, err := encrypt.ProvideEncrypter(config) if err != nil { return nil, err } secretController := secret.ProvideController(db, pathUID, pathStore, encrypter, secretStore, authorizer, spaceStore) triggerStore := database.ProvideTriggerStore(db) - triggerController := trigger.ProvideController(db, authorizer, triggerStore, pipelineStore, spaceStore) + triggerController := trigger.ProvideController(db, authorizer, triggerStore, pipelineStore, repoStore) connectorController := connector.ProvideController(db, pathUID, connectorStore, authorizer, spaceStore) templateController := template.ProvideController(db, pathUID, templateStore, authorizer, spaceStore) pluginStore := database.ProvidePluginStore(db) @@ -202,7 +203,13 @@ func initSystem(ctx context.Context, config *types.Config) (*server.System, erro if err != nil { return nil, err } - servicesServices := services.ProvideServices(webhookService, pullreqService) + jobStore := database.ProvideJobStore(db) + executor := job.ProvideExecutor(jobStore, pubSub) + scheduler, err := job.ProvideScheduler(jobStore, executor, mutexManager, pubSub, config) + if err != nil { + return nil, err + } + servicesServices := services.ProvideServices(webhookService, pullreqService, executor, scheduler) serverSystem := server.NewSystem(bootstrapBootstrap, serverServer, grpcServer, manager, servicesServices) return serverSystem, nil } diff --git a/gitrpc/internal/gitea/diff.go b/gitrpc/internal/gitea/diff.go index 8f5fb27a0..7d871756b 100644 --- a/gitrpc/internal/gitea/diff.go +++ 
b/gitrpc/internal/gitea/diff.go @@ -53,9 +53,9 @@ func (g Adapter) DiffShortStat( headRef string, useMergeBase bool, ) (types.DiffShortStat, error) { - separator := "..." - if !useMergeBase { - separator = ".." + separator := ".." + if useMergeBase { + separator = "..." } shortstatArgs := []string{baseRef + separator + headRef} diff --git a/gitrpc/internal/service/diff.go b/gitrpc/internal/service/diff.go index 159eec171..c4f8c2879 100644 --- a/gitrpc/internal/service/diff.go +++ b/gitrpc/internal/service/diff.go @@ -83,12 +83,7 @@ func (s DiffService) DiffShortStat(ctx context.Context, r *rpc.DiffRequest) (*rp base := r.GetBase() repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) - // direct comparison - // when direct is false then its like you use --merge-base - // to find best common ancestor(s) between two refs - direct := !r.GetMergeBase() - - stat, err := s.adapter.DiffShortStat(ctx, repoPath, r.GetBaseRef(), r.GetHeadRef(), direct) + stat, err := s.adapter.DiffShortStat(ctx, repoPath, r.GetBaseRef(), r.GetHeadRef(), r.GetMergeBase()) if err != nil { return nil, processGitErrorf(err, "failed to fetch short statistics "+ "between %s and %s", r.GetBaseRef(), r.GetHeadRef()) diff --git a/gitrpc/internal/service/interface.go b/gitrpc/internal/service/interface.go index 255ab68b9..ae9936c0e 100644 --- a/gitrpc/internal/service/interface.go +++ b/gitrpc/internal/service/interface.go @@ -75,7 +75,7 @@ type GitAdapter interface { repoPath string, baseRef string, headRef string, - direct bool) (types.DiffShortStat, error) + useMergeBase bool) (types.DiffShortStat, error) GetDiffHunkHeaders(ctx context.Context, repoPath string, diff --git a/go.mod b/go.mod index bbb1b79a8..f960b03fd 100644 --- a/go.mod +++ b/go.mod @@ -18,10 +18,11 @@ require ( github.com/golang/mock v1.6.0 github.com/google/go-cmp v0.5.9 github.com/google/wire v0.5.0 + github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75 github.com/gotidy/ptr v1.3.0 
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/guregu/null v4.0.0+incompatible - github.com/harness/go-rbac v0.0.0-20230409233212-ca97fe90aac8 + github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2 github.com/hashicorp/go-multierror v1.1.1 github.com/jmoiron/sqlx v1.3.3 github.com/joho/godotenv v1.3.0 diff --git a/go.sum b/go.sum index b2c275ac0..d8c1eb41f 100644 --- a/go.sum +++ b/go.sum @@ -254,6 +254,8 @@ github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5 github.com/googleapis/gax-go/v2 v2.7.0 h1:IcsPKeInNvYi7eqSaDjiZqDDKu5rsmunY0Y1YupQSSQ= github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75 h1:f0n1xnMSmBLzVfsMMvriDyA75NB/oBgILX2GcHXIQzY= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= @@ -267,8 +269,8 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgf github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw= github.com/guregu/null v4.0.0+incompatible/go.mod h1:ePGpQaN9cw0tj45IR5E5ehMvsFlLlQZAkkOXZurJ3NM= -github.com/harness/go-rbac v0.0.0-20230409233212-ca97fe90aac8 h1:sQzaA/ithB9mCXTC5VeC4XTWmQ531Tefbgxr1X4y7WU= -github.com/harness/go-rbac v0.0.0-20230409233212-ca97fe90aac8/go.mod h1:uGgBgSZPgyygG5rWzoYsKIQ8TM4zt5yQq9nreznWvOI= +github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2 
h1:M1Jd2uEKl4YW9g/6vzN1qo06d5dshYYdwxlhOTUSnh4= +github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2/go.mod h1:uGgBgSZPgyygG5rWzoYsKIQ8TM4zt5yQq9nreznWvOI= github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= @@ -367,6 +369,7 @@ github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmoiron/sqlx v1.3.3 h1:j82X0bf7oQ27XeqxicSZsTU5suPwKElg3oyxNn43iTk= github.com/jmoiron/sqlx v1.3.3/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= diff --git a/internal/api/auth/pipeline.go b/internal/api/auth/pipeline.go index 8c5c199a6..8d012b8ca 100644 --- a/internal/api/auth/pipeline.go +++ b/internal/api/auth/pipeline.go @@ -9,21 +9,26 @@ import ( "github.com/harness/gitness/internal/auth" "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" "github.com/harness/gitness/types" "github.com/harness/gitness/types/enum" + "github.com/pkg/errors" ) -// CheckPipeline checks if a repo specific permission is granted for the current auth session -// in the scope of its parent. +// CheckPipeline checks if a pipeline specific permission is granted for the current auth session +// in the scope of the parent. 
// Returns nil if the permission is granted, otherwise returns an error. // NotAuthenticated, NotAuthorized, or any underlying error. func CheckPipeline(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, - parentPath, uid string, permission enum.Permission) error { - scope := &types.Scope{SpacePath: parentPath} + repoPath string, pipelineUID string, permission enum.Permission) error { + spacePath, repoName, err := paths.DisectLeaf(repoPath) + if err != nil { + return errors.Wrapf(err, "Failed to disect path '%s'", repoPath) + } + scope := &types.Scope{SpacePath: spacePath, Repo: repoName} resource := &types.Resource{ Type: enum.ResourceTypePipeline, - Name: uid, + Name: pipelineUID, } - return Check(ctx, authorizer, session, scope, resource, permission) } diff --git a/internal/api/controller/check/check_report.go b/internal/api/controller/check/check_report.go index add5408a9..ce872e147 100644 --- a/internal/api/controller/check/check_report.go +++ b/internal/api/controller/check/check_report.go @@ -68,7 +68,7 @@ func (in *ReportInput) Validate() error { in.Payload.Kind) } - payloadDataJSON, err := sanitizeJsonPayload(in.Payload.Data, &types.CheckPayloadText{}) + payloadDataJSON, err := sanitizeJSONPayload(in.Payload.Data, &types.CheckPayloadText{}) if err != nil { return err } @@ -79,7 +79,7 @@ func (in *ReportInput) Validate() error { return nil } -func sanitizeJsonPayload(source json.RawMessage, data any) (json.RawMessage, error) { +func sanitizeJSONPayload(source json.RawMessage, data any) (json.RawMessage, error) { if len(source) == 0 { return json.Marshal(data) // marshal the empty object } @@ -118,7 +118,7 @@ func (c *Controller) Report( in *ReportInput, metadata map[string]string, ) (*types.Check, error) { - repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionCommitCheckReport) + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoReportCommitCheck) if err != nil { return nil, 
fmt.Errorf("failed to acquire access access to repo: %w", err) } diff --git a/internal/api/controller/execution/controller.go b/internal/api/controller/execution/controller.go index a6801f361..00753c52a 100644 --- a/internal/api/controller/execution/controller.go +++ b/internal/api/controller/execution/controller.go @@ -15,25 +15,25 @@ type Controller struct { db *sqlx.DB authorizer authz.Authorizer executionStore store.ExecutionStore + repoStore store.RepoStore stageStore store.StageStore pipelineStore store.PipelineStore - spaceStore store.SpaceStore } func NewController( db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + repoStore store.RepoStore, stageStore store.StageStore, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, ) *Controller { return &Controller{ db: db, authorizer: authorizer, executionStore: executionStore, + repoStore: repoStore, stageStore: stageStore, pipelineStore: pipelineStore, - spaceStore: spaceStore, } } diff --git a/internal/api/controller/execution/create.go b/internal/api/controller/execution/create.go index a32a738f7..662105c87 100644 --- a/internal/api/controller/execution/create.go +++ b/internal/api/controller/execution/create.go @@ -23,25 +23,25 @@ type CreateInput struct { func (c *Controller) Create( ctx context.Context, session *auth.Session, - spaceRef string, - uid string, + repoRef string, + pipelineUID string, in *CreateInput, ) (*types.Execution, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, uid) - if err != nil { - return nil, fmt.Errorf("failed to find pipeline: %w", err) - } - - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineExecute) + err = 
apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, + pipelineUID, enum.PermissionPipelineExecute) if err != nil { return nil, fmt.Errorf("failed to authorize: %w", err) } + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + pipeline, err = c.pipelineStore.IncrementSeqNum(ctx, pipeline) if err != nil { return nil, fmt.Errorf("failed to increment sequence number: %w", err) diff --git a/internal/api/controller/execution/delete.go b/internal/api/controller/execution/delete.go index 2b4c808dd..a12916db4 100644 --- a/internal/api/controller/execution/delete.go +++ b/internal/api/controller/execution/delete.go @@ -16,23 +16,23 @@ import ( func (c *Controller) Delete( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, executionNum int64, ) error { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return fmt.Errorf("failed to find parent space: %w", err) + return fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineDelete) + if err != nil { + return fmt.Errorf("failed to authorize: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return fmt.Errorf("failed to find pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineDelete) - if err != nil { - return fmt.Errorf("could not authorize: %w", err) - } err = c.executionStore.Delete(ctx, pipeline.ID, executionNum) if err != nil { return fmt.Errorf("could not delete execution: %w", err) diff --git a/internal/api/controller/execution/find.go b/internal/api/controller/execution/find.go index 
cdd464f48..686c2c1f9 100644 --- a/internal/api/controller/execution/find.go +++ b/internal/api/controller/execution/find.go @@ -17,25 +17,23 @@ import ( func (c *Controller) Find( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, executionNum int64, ) (*types.Execution, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, fmt.Errorf("failed to find pipeline: %w", err) } - - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) - if err != nil { - return nil, fmt.Errorf("could not authorize: %w", err) - } - execution, err := c.executionStore.Find(ctx, pipeline.ID, executionNum) if err != nil { return nil, fmt.Errorf("failed to find execution %d: %w", executionNum, err) diff --git a/internal/api/controller/execution/list.go b/internal/api/controller/execution/list.go index a5ba72f6b..47f1e7b0d 100644 --- a/internal/api/controller/execution/list.go +++ b/internal/api/controller/execution/list.go @@ -17,24 +17,25 @@ import ( func (c *Controller) List( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, pagination types.Pagination, ) ([]*types.Execution, int64, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, 0, fmt.Errorf("failed to find parent space: %w", err) 
- } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) - if err != nil { - return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) + return nil, 0, fmt.Errorf("failed to find repo by ref: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) if err != nil { return nil, 0, fmt.Errorf("failed to authorize: %w", err) } + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) + } + var count int64 var executions []*types.Execution diff --git a/internal/api/controller/execution/update.go b/internal/api/controller/execution/update.go index 7e2e6b6e8..7c2184b0a 100644 --- a/internal/api/controller/execution/update.go +++ b/internal/api/controller/execution/update.go @@ -21,21 +21,21 @@ type UpdateInput struct { func (c *Controller) Update( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, executionNum int64, in *UpdateInput) (*types.Execution, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipelineUID, enum.PermissionPipelineEdit) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) if err != nil { - return nil, fmt.Errorf("failed to check auth: %w", err) + return nil, fmt.Errorf("failed to authorize: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, 
fmt.Errorf("failed to find pipeline: %w", err) } diff --git a/internal/api/controller/execution/wire.go b/internal/api/controller/execution/wire.go index c85fd0ee2..29d6449ea 100644 --- a/internal/api/controller/execution/wire.go +++ b/internal/api/controller/execution/wire.go @@ -20,10 +20,10 @@ var WireSet = wire.NewSet( func ProvideController(db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + repoStore store.RepoStore, stageStore store.StageStore, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, ) *Controller { - return NewController(db, authorizer, executionStore, stageStore, - pipelineStore, spaceStore) + return NewController(db, authorizer, executionStore, repoStore, stageStore, + pipelineStore) } diff --git a/internal/api/controller/logs/controller.go b/internal/api/controller/logs/controller.go index 273909c73..6a6d8f3b4 100644 --- a/internal/api/controller/logs/controller.go +++ b/internal/api/controller/logs/controller.go @@ -16,34 +16,34 @@ type Controller struct { db *sqlx.DB authorizer authz.Authorizer executionStore store.ExecutionStore + repoStore store.RepoStore pipelineStore store.PipelineStore stageStore store.StageStore stepStore store.StepStore logStore store.LogStore logStream livelog.LogStream - spaceStore store.SpaceStore } func NewController( db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + repoStore store.RepoStore, pipelineStore store.PipelineStore, stageStore store.StageStore, stepStore store.StepStore, logStore store.LogStore, logStream livelog.LogStream, - spaceStore store.SpaceStore, ) *Controller { return &Controller{ db: db, authorizer: authorizer, executionStore: executionStore, + repoStore: repoStore, pipelineStore: pipelineStore, stageStore: stageStore, stepStore: stepStore, logStore: logStore, logStream: logStream, - spaceStore: spaceStore, } } diff --git a/internal/api/controller/logs/find.go b/internal/api/controller/logs/find.go index 
06d92dfe9..3ff681bf3 100644 --- a/internal/api/controller/logs/find.go +++ b/internal/api/controller/logs/find.go @@ -17,27 +17,26 @@ import ( func (c *Controller) Find( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, executionNum int64, stageNum int, stepNum int, ) (io.ReadCloser, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, fmt.Errorf("failed to find pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) - if err != nil { - return nil, fmt.Errorf("could not authorize: %w", err) - } - execution, err := c.executionStore.Find(ctx, pipeline.ID, executionNum) if err != nil { return nil, fmt.Errorf("failed to find execution: %w", err) diff --git a/internal/api/controller/logs/tail.go b/internal/api/controller/logs/tail.go index 1cbcf45e0..2e2a37467 100644 --- a/internal/api/controller/logs/tail.go +++ b/internal/api/controller/logs/tail.go @@ -17,27 +17,25 @@ import ( func (c *Controller) Tail( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, executionNum int64, stageNum int, stepNum int, ) (<-chan *livelog.Line, <-chan error, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, nil, fmt.Errorf("failed to find parent 
space: %w", err) + return nil, nil, fmt.Errorf("failed to find repo by ref: %w", err) } - - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, nil, fmt.Errorf("failed to find pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) - if err != nil { - return nil, nil, fmt.Errorf("could not authorize: %w", err) - } - execution, err := c.executionStore.Find(ctx, pipeline.ID, executionNum) if err != nil { return nil, nil, fmt.Errorf("failed to find execution: %w", err) diff --git a/internal/api/controller/logs/wire.go b/internal/api/controller/logs/wire.go index 3217dd1e3..21ec30db1 100644 --- a/internal/api/controller/logs/wire.go +++ b/internal/api/controller/logs/wire.go @@ -21,13 +21,13 @@ var WireSet = wire.NewSet( func ProvideController(db *sqlx.DB, authorizer authz.Authorizer, executionStore store.ExecutionStore, + repoStore store.RepoStore, pipelineStore store.PipelineStore, stageStore store.StageStore, stepStore store.StepStore, logStore store.LogStore, logStream livelog.LogStream, - spaceStore store.SpaceStore, ) *Controller { - return NewController(db, authorizer, executionStore, - pipelineStore, stageStore, stepStore, logStore, logStream, spaceStore) + return NewController(db, authorizer, executionStore, repoStore, + pipelineStore, stageStore, stepStore, logStore, logStream) } diff --git a/internal/api/controller/pipeline/controller.go b/internal/api/controller/pipeline/controller.go index 61020c78a..5168d28d3 100644 --- a/internal/api/controller/pipeline/controller.go +++ b/internal/api/controller/pipeline/controller.go @@ -20,7 +20,6 @@ type Controller struct 
{ repoStore store.RepoStore authorizer authz.Authorizer pipelineStore store.PipelineStore - spaceStore store.SpaceStore } func NewController( @@ -30,7 +29,6 @@ func NewController( pathStore store.PathStore, repoStore store.RepoStore, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, ) *Controller { return &Controller{ db: db, @@ -39,6 +37,5 @@ func NewController( repoStore: repoStore, authorizer: authorizer, pipelineStore: pipelineStore, - spaceStore: spaceStore, } } diff --git a/internal/api/controller/pipeline/create.go b/internal/api/controller/pipeline/create.go index 2de13b205..4405b0037 100644 --- a/internal/api/controller/pipeline/create.go +++ b/internal/api/controller/pipeline/create.go @@ -7,7 +7,6 @@ package pipeline import ( "context" "fmt" - "strconv" "strings" "time" @@ -26,34 +25,26 @@ var ( ) type CreateInput struct { - Description string `json:"description"` - SpaceRef string `json:"space_ref"` - UID string `json:"uid"` - RepoRef string `json:"repo_ref"` // empty if repo_type != gitness - RepoType enum.ScmType `json:"repo_type"` - DefaultBranch string `json:"default_branch"` - ConfigPath string `json:"config_path"` + Description string `json:"description"` + UID string `json:"uid"` + DefaultBranch string `json:"default_branch"` + ConfigPath string `json:"config_path"` } -func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.Pipeline, error) { - parentSpace, err := c.spaceStore.FindByRef(ctx, in.SpaceRef) +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateInput, +) (*types.Pipeline, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent by ref: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, parentSpace.Path, in.UID, enum.PermissionPipelineEdit) + err = apiauth.CheckPipeline(ctx, 
c.authorizer, session, repo.Path, "", enum.PermissionPipelineEdit) if err != nil { - return nil, err - } - - var repoID int64 - - if in.RepoType == enum.ScmTypeGitness { - repo, err := c.repoStore.FindByRef(ctx, in.RepoRef) - if err != nil { - return nil, fmt.Errorf("failed to find repo by ref: %w", err) - } - repoID = repo.ID + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } if err := c.sanitizeCreateInput(in); err != nil { @@ -64,11 +55,9 @@ func (c *Controller) Create(ctx context.Context, session *auth.Session, in *Crea now := time.Now().UnixMilli() pipeline = &types.Pipeline{ Description: in.Description, - SpaceID: parentSpace.ID, + RepoID: repo.ID, UID: in.UID, Seq: 0, - RepoID: repoID, - RepoType: in.RepoType, DefaultBranch: in.DefaultBranch, ConfigPath: in.ConfigPath, Created: now, @@ -84,11 +73,6 @@ func (c *Controller) Create(ctx context.Context, session *auth.Session, in *Crea } func (c *Controller) sanitizeCreateInput(in *CreateInput) error { - parentRefAsID, err := strconv.ParseInt(in.SpaceRef, 10, 64) - if (err == nil && parentRefAsID <= 0) || (len(strings.TrimSpace(in.SpaceRef)) == 0) { - return errPipelineRequiresParent - } - if err := c.uidCheck(in.UID, false); err != nil { return err } diff --git a/internal/api/controller/pipeline/delete.go b/internal/api/controller/pipeline/delete.go index 92eba72ac..5831ce296 100644 --- a/internal/api/controller/pipeline/delete.go +++ b/internal/api/controller/pipeline/delete.go @@ -13,17 +13,18 @@ import ( "github.com/harness/gitness/types/enum" ) -func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef string, uid string) error { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) +func (c *Controller) Delete(ctx context.Context, session *auth.Session, repoRef string, uid string) error { + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return fmt.Errorf("failed to find parent space: %w", err) + return fmt.Errorf("failed to find repo by ref: %w", 
err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, uid, enum.PermissionPipelineDelete) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineDelete) if err != nil { - return fmt.Errorf("could not authorize: %w", err) + return fmt.Errorf("failed to authorize pipeline: %w", err) } - err = c.pipelineStore.DeleteByUID(ctx, space.ID, uid) + + err = c.pipelineStore.DeleteByUID(ctx, repo.ID, uid) if err != nil { return fmt.Errorf("could not delete pipeline: %w", err) } diff --git a/internal/api/controller/pipeline/find.go b/internal/api/controller/pipeline/find.go index f132227b9..70d74defe 100644 --- a/internal/api/controller/pipeline/find.go +++ b/internal/api/controller/pipeline/find.go @@ -17,16 +17,16 @@ import ( func (c *Controller) Find( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, uid string, ) (*types.Pipeline, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, uid, enum.PermissionPipelineView) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineView) if err != nil { - return nil, fmt.Errorf("could not authorize: %w", err) + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } - return c.pipelineStore.FindByUID(ctx, space.ID, uid) + return c.pipelineStore.FindByUID(ctx, repo.ID, uid) } diff --git a/internal/api/controller/pipeline/update.go b/internal/api/controller/pipeline/update.go index ef77518b3..cc9182346 100644 --- a/internal/api/controller/pipeline/update.go +++ b/internal/api/controller/pipeline/update.go @@ -23,21 +23,20 @@ type UpdateInput struct { func (c *Controller) Update( ctx context.Context, session 
*auth.Session, - spaceRef string, + repoRef string, uid string, in *UpdateInput, ) (*types.Pipeline, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, uid, enum.PermissionPipelineEdit) - if err != nil { - return nil, fmt.Errorf("could not authorize: %w", err) - } - - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, uid) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, uid) if err != nil { return nil, fmt.Errorf("failed to find pipeline: %w", err) } diff --git a/internal/api/controller/pipeline/wire.go b/internal/api/controller/pipeline/wire.go index d665a8ee6..6dfcd0fc0 100644 --- a/internal/api/controller/pipeline/wire.go +++ b/internal/api/controller/pipeline/wire.go @@ -24,7 +24,6 @@ func ProvideController(db *sqlx.DB, repoStore store.RepoStore, authorizer authz.Authorizer, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, ) *Controller { - return NewController(db, uidCheck, authorizer, pathStore, repoStore, pipelineStore, spaceStore) + return NewController(db, uidCheck, authorizer, pathStore, repoStore, pipelineStore) } diff --git a/internal/api/controller/repo/controller.go b/internal/api/controller/repo/controller.go index 1c0357de5..6bae1d018 100644 --- a/internal/api/controller/repo/controller.go +++ b/internal/api/controller/repo/controller.go @@ -29,6 +29,7 @@ type Controller struct { pathStore store.PathStore repoStore store.RepoStore spaceStore store.SpaceStore + pipelineStore store.PipelineStore principalStore store.PrincipalStore gitRPCClient 
gitrpc.Interface } @@ -42,6 +43,7 @@ func NewController( pathStore store.PathStore, repoStore store.RepoStore, spaceStore store.SpaceStore, + pipelineStore store.PipelineStore, principalStore store.PrincipalStore, gitRPCClient gitrpc.Interface, ) *Controller { @@ -54,6 +56,7 @@ func NewController( pathStore: pathStore, repoStore: repoStore, spaceStore: spaceStore, + pipelineStore: pipelineStore, principalStore: principalStore, gitRPCClient: gitRPCClient, } diff --git a/internal/api/controller/repo/diff.go b/internal/api/controller/repo/diff.go index b39c04137..680cb49ad 100644 --- a/internal/api/controller/repo/diff.go +++ b/internal/api/controller/repo/diff.go @@ -92,6 +92,7 @@ func (c *Controller) DiffStats( ReadParams: gitrpc.CreateRPCReadParams(repo), BaseRef: info.BaseRef, HeadRef: info.HeadRef, + MergeBase: info.MergeBase, }) if err != nil { return types.DiffStats{}, err diff --git a/internal/api/controller/space/list_pipelines.go b/internal/api/controller/repo/list_pipelines.go similarity index 57% rename from internal/api/controller/space/list_pipelines.go rename to internal/api/controller/repo/list_pipelines.go index 87d999bb4..ae5e0dda5 100644 --- a/internal/api/controller/space/list_pipelines.go +++ b/internal/api/controller/repo/list_pipelines.go @@ -1,7 +1,7 @@ // Copyright 2022 Harness Inc. All rights reserved. // Use of this source code is governed by the Polyform Free Trial License // that can be found in the LICENSE.md file for this repository. -package space +package repo import ( "context" @@ -14,36 +14,45 @@ import ( "github.com/harness/gitness/types/enum" ) -// ListPipelines lists the pipelines in a space. +// ListPipelines lists the pipelines under a repository. 
func (c *Controller) ListPipelines( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, + latest bool, filter types.ListQueryFilter, ) ([]*types.Pipeline, int64, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, 0, fmt.Errorf("failed to find parent space: %w", err) + return nil, 0, fmt.Errorf("failed to find repo: %w", err) } - err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionPipelineView, false) + err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionPipelineView, false) if err != nil { - return nil, 0, fmt.Errorf("could not authorize: %w", err) + return nil, 0, fmt.Errorf("failed to authorize: %w", err) } var count int64 var pipelines []*types.Pipeline err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) { - count, err = c.pipelineStore.Count(ctx, space.ID, filter) + count, err = c.pipelineStore.Count(ctx, repo.ID, filter) if err != nil { return fmt.Errorf("failed to count child executions: %w", err) } - pipelines, err = c.pipelineStore.List(ctx, space.ID, filter) - if err != nil { - return fmt.Errorf("failed to count child executions: %w", err) + if !latest { + pipelines, err = c.pipelineStore.List(ctx, repo.ID, filter) + if err != nil { + return fmt.Errorf("failed to list pipelines: %w", err) + } + } else { + pipelines, err = c.pipelineStore.ListLatest(ctx, repo.ID, filter) + if err != nil { + return fmt.Errorf("failed to list latest pipelines: %w", err) + } } + return }, dbtx.TxDefaultReadOnly) if err != nil { diff --git a/internal/api/controller/repo/wire.go b/internal/api/controller/repo/wire.go index c642348e3..fba02dba7 100644 --- a/internal/api/controller/repo/wire.go +++ b/internal/api/controller/repo/wire.go @@ -23,7 +23,8 @@ var WireSet = wire.NewSet( func ProvideController(config *types.Config, db *sqlx.DB, urlProvider *url.Provider, uidCheck check.PathUID, authorizer 
authz.Authorizer, pathStore store.PathStore, repoStore store.RepoStore, - spaceStore store.SpaceStore, principalStore store.PrincipalStore, rpcClient gitrpc.Interface) *Controller { + spaceStore store.SpaceStore, pipelineStore store.PipelineStore, + principalStore store.PrincipalStore, rpcClient gitrpc.Interface) *Controller { return NewController(config.Git.DefaultBranch, db, urlProvider, uidCheck, - authorizer, pathStore, repoStore, spaceStore, principalStore, rpcClient) + authorizer, pathStore, repoStore, spaceStore, pipelineStore, principalStore, rpcClient) } diff --git a/internal/api/controller/trigger/controller.go b/internal/api/controller/trigger/controller.go index 3e0076037..304974bbc 100644 --- a/internal/api/controller/trigger/controller.go +++ b/internal/api/controller/trigger/controller.go @@ -16,7 +16,7 @@ type Controller struct { authorizer authz.Authorizer triggerStore store.TriggerStore pipelineStore store.PipelineStore - spaceStore store.SpaceStore + repoStore store.RepoStore } func NewController( @@ -24,13 +24,13 @@ func NewController( authorizer authz.Authorizer, triggerStore store.TriggerStore, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, + repoStore store.RepoStore, ) *Controller { return &Controller{ db: db, authorizer: authorizer, triggerStore: triggerStore, pipelineStore: pipelineStore, - spaceStore: spaceStore, + repoStore: repoStore, } } diff --git a/internal/api/controller/trigger/create.go b/internal/api/controller/trigger/create.go index 2bb847e01..8d95174a5 100644 --- a/internal/api/controller/trigger/create.go +++ b/internal/api/controller/trigger/create.go @@ -24,25 +24,24 @@ type CreateInput struct { func (c *Controller) Create( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, in *CreateInput, ) (*types.Trigger, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, 
fmt.Errorf("failed to find space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) } - - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) - if err != nil { - return nil, fmt.Errorf("failed to find pipeline: %w", err) - } - // Trigger permissions are associated with pipeline permissions. If a user has permissions // to edit the pipeline, they will have permissions to create a trigger as well. - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineEdit) + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) if err != nil { - return nil, fmt.Errorf("failed to authorize: %w", err) + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) } now := time.Now().UnixMilli() diff --git a/internal/api/controller/trigger/delete.go b/internal/api/controller/trigger/delete.go index 18e1ce147..d0355d5c1 100644 --- a/internal/api/controller/trigger/delete.go +++ b/internal/api/controller/trigger/delete.go @@ -16,26 +16,26 @@ import ( func (c *Controller) Delete( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, triggerUID string, ) error { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return fmt.Errorf("failed to find parent space: %w", err) + return fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to edit the pipeline, they will have permissions to remove a trigger as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + if err != nil { + return fmt.Errorf("failed to authorize pipeline: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return fmt.Errorf("failed to find pipeline: %w", err) } - // Trigger permissions are associated with pipeline permissions. If a user has permissions - // to delete the pipeline, they will have permissions to remove a trigger as well. - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineEdit) - if err != nil { - return fmt.Errorf("could not authorize: %w", err) - } err = c.triggerStore.DeleteByUID(ctx, pipeline.ID, triggerUID) if err != nil { return fmt.Errorf("could not delete trigger: %w", err) diff --git a/internal/api/controller/trigger/find.go b/internal/api/controller/trigger/find.go index 908793bcd..3c805e35f 100644 --- a/internal/api/controller/trigger/find.go +++ b/internal/api/controller/trigger/find.go @@ -17,25 +17,24 @@ import ( func (c *Controller) Find( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, triggerUID string, ) (*types.Trigger, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find parent space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, fmt.Errorf("failed to find pipeline: %w", 
err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) - if err != nil { - return nil, fmt.Errorf("could not authorize: %w", err) - } - trigger, err := c.triggerStore.FindByUID(ctx, pipeline.ID, triggerUID) if err != nil { return nil, fmt.Errorf("failed to find trigger %s: %w", triggerUID, err) diff --git a/internal/api/controller/trigger/list.go b/internal/api/controller/trigger/list.go index 753421221..868198b2b 100644 --- a/internal/api/controller/trigger/list.go +++ b/internal/api/controller/trigger/list.go @@ -16,22 +16,24 @@ import ( func (c *Controller) List( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, filter types.ListQueryFilter, ) ([]*types.Trigger, int64, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, 0, fmt.Errorf("failed to find parent space: %w", err) + return nil, 0, fmt.Errorf("failed to find repo by ref: %w", err) } - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to view the pipeline, they will have permissions to list triggers as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) if err != nil { - return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) + return nil, 0, fmt.Errorf("failed to authorize pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipeline.UID, enum.PermissionPipelineView) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { - return nil, 0, fmt.Errorf("failed to authorize: %w", err) + return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) } count, err := c.triggerStore.Count(ctx, pipeline.ID, filter) diff --git a/internal/api/controller/trigger/update.go b/internal/api/controller/trigger/update.go index d6aa64776..adef2f668 100644 --- a/internal/api/controller/trigger/update.go +++ b/internal/api/controller/trigger/update.go @@ -23,21 +23,22 @@ type UpdateInput struct { func (c *Controller) Update( ctx context.Context, session *auth.Session, - spaceRef string, + repoRef string, pipelineUID string, triggerUID string, in *UpdateInput) (*types.Trigger, error) { - space, err := c.spaceStore.FindByRef(ctx, spaceRef) + repo, err := c.repoStore.FindByRef(ctx, repoRef) if err != nil { - return nil, fmt.Errorf("failed to find space: %w", err) + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to edit the pipeline, they will have permissions to edit the trigger as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) } - err = apiauth.CheckPipeline(ctx, c.authorizer, session, space.Path, pipelineUID, enum.PermissionPipelineEdit) - if err != nil { - return nil, fmt.Errorf("failed to check auth: %w", err) - } - - pipeline, err := c.pipelineStore.FindByUID(ctx, space.ID, pipelineUID) + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) if err != nil { return nil, fmt.Errorf("failed to find pipeline: %w", err) } diff --git a/internal/api/controller/trigger/wire.go b/internal/api/controller/trigger/wire.go index 0d69575f1..8fb3ab6f7 100644 --- a/internal/api/controller/trigger/wire.go +++ b/internal/api/controller/trigger/wire.go @@ -21,7 +21,7 @@ func ProvideController(db *sqlx.DB, authorizer authz.Authorizer, triggerStore store.TriggerStore, pipelineStore store.PipelineStore, - spaceStore store.SpaceStore, + repoStore store.RepoStore, ) *Controller { - return NewController(db, authorizer, triggerStore, pipelineStore, spaceStore) + return NewController(db, authorizer, triggerStore, pipelineStore, repoStore) } diff --git a/internal/api/handler/execution/create.go b/internal/api/handler/execution/create.go index 97366148e..348972237 100644 --- a/internal/api/handler/execution/create.go +++ b/internal/api/handler/execution/create.go @@ -11,19 +11,18 @@ import ( "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleCreate(executionCtrl *execution.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != 
nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -36,7 +35,7 @@ func HandleCreate(executionCtrl *execution.Controller) http.HandlerFunc { return } - execution, err := executionCtrl.Create(ctx, session, spaceRef, pipelineUID, in) + execution, err := executionCtrl.Create(ctx, session, repoRef, pipelineUID, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/execution/delete.go b/internal/api/handler/execution/delete.go index 77ee63e2f..164c24110 100644 --- a/internal/api/handler/execution/delete.go +++ b/internal/api/handler/execution/delete.go @@ -10,19 +10,18 @@ import ( "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleDelete(executionCtrl *execution.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -33,7 +32,7 @@ func HandleDelete(executionCtrl *execution.Controller) http.HandlerFunc { return } - err = executionCtrl.Delete(ctx, session, spaceRef, pipelineUID, n) + err = executionCtrl.Delete(ctx, session, repoRef, pipelineUID, n) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/execution/find.go b/internal/api/handler/execution/find.go index 9a4bc4c49..124da3ccc 100644 --- 
a/internal/api/handler/execution/find.go +++ b/internal/api/handler/execution/find.go @@ -10,14 +10,13 @@ import ( "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleFind(executionCtrl *execution.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -27,13 +26,13 @@ func HandleFind(executionCtrl *execution.Controller) http.HandlerFunc { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - execution, err := executionCtrl.Find(ctx, session, spaceRef, pipelineUID, n) + execution, err := executionCtrl.Find(ctx, session, repoRef, pipelineUID, n) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/execution/list.go b/internal/api/handler/execution/list.go index eb590f239..b69c9886a 100644 --- a/internal/api/handler/execution/list.go +++ b/internal/api/handler/execution/list.go @@ -10,19 +10,18 @@ import ( "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleList(executionCtrl *execution.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { 
render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -30,7 +29,7 @@ func HandleList(executionCtrl *execution.Controller) http.HandlerFunc { pagination := request.ParsePaginationFromRequest(r) - repos, totalCount, err := executionCtrl.List(ctx, session, spaceRef, pipelineUID, pagination) + repos, totalCount, err := executionCtrl.List(ctx, session, repoRef, pipelineUID, pagination) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/execution/update.go b/internal/api/handler/execution/update.go index 14fdf18f9..5709bf21d 100644 --- a/internal/api/handler/execution/update.go +++ b/internal/api/handler/execution/update.go @@ -11,7 +11,6 @@ import ( "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleUpdate(executionCtrl *execution.Controller) http.HandlerFunc { @@ -26,12 +25,12 @@ func HandleUpdate(executionCtrl *execution.Controller) http.HandlerFunc { return } - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -42,7 +41,7 @@ func HandleUpdate(executionCtrl *execution.Controller) http.HandlerFunc { return } - pipeline, err := executionCtrl.Update(ctx, session, spaceRef, pipelineUID, n, in) + pipeline, err := executionCtrl.Update(ctx, session, repoRef, pipelineUID, n, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/logs/find.go b/internal/api/handler/logs/find.go index 
fb0832a84..b1e7b469c 100644 --- a/internal/api/handler/logs/find.go +++ b/internal/api/handler/logs/find.go @@ -11,14 +11,18 @@ import ( "github.com/harness/gitness/internal/api/controller/logs" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleFind(logCtrl *logs.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -38,14 +42,8 @@ func HandleFind(logCtrl *logs.Controller) http.HandlerFunc { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) - if err != nil { - render.TranslatedUserError(w, err) - return - } - rc, err := logCtrl.Find( - ctx, session, spaceRef, pipelineUID, + ctx, session, repoRef, pipelineUID, executionNum, int(stageNum), int(stepNum)) if err != nil { render.TranslatedUserError(w, err) diff --git a/internal/api/handler/logs/tail.go b/internal/api/handler/logs/tail.go index 122eb2379..b611d0ad4 100644 --- a/internal/api/handler/logs/tail.go +++ b/internal/api/handler/logs/tail.go @@ -14,7 +14,6 @@ import ( "github.com/harness/gitness/internal/api/controller/logs" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" "github.com/rs/zerolog/log" ) @@ -28,7 +27,7 @@ func HandleTail(logCtrl *logs.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := 
request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -48,7 +47,7 @@ func HandleTail(logCtrl *logs.Controller) http.HandlerFunc { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -65,7 +64,7 @@ func HandleTail(logCtrl *logs.Controller) http.HandlerFunc { f.Flush() linec, errc, err := logCtrl.Tail( - ctx, session, spaceRef, pipelineUID, + ctx, session, repoRef, pipelineUID, executionNum, int(stageNum), int(stepNum)) if err != nil { render.TranslatedUserError(w, err) diff --git a/internal/api/handler/pipeline/create.go b/internal/api/handler/pipeline/create.go index df1f49a9c..9018512cd 100644 --- a/internal/api/handler/pipeline/create.go +++ b/internal/api/handler/pipeline/create.go @@ -18,14 +18,20 @@ func HandleCreate(pipelineCtrl *pipeline.Controller) http.HandlerFunc { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + in := new(pipeline.CreateInput) - err := json.NewDecoder(r.Body).Decode(in) + err = json.NewDecoder(r.Body).Decode(in) if err != nil { render.BadRequestf(w, "Invalid Request Body: %s.", err) return } - pipeline, err := pipelineCtrl.Create(ctx, session, in) + pipeline, err := pipelineCtrl.Create(ctx, session, repoRef, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/pipeline/delete.go b/internal/api/handler/pipeline/delete.go index 1679df7a1..656e6f566 100644 --- a/internal/api/handler/pipeline/delete.go +++ b/internal/api/handler/pipeline/delete.go @@ -10,25 +10,24 @@ import ( "github.com/harness/gitness/internal/api/controller/pipeline" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - 
"github.com/harness/gitness/internal/paths" ) func HandleDelete(pipelineCtrl *pipeline.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - err = pipelineCtrl.Delete(ctx, session, spaceRef, pipelineUID) + err = pipelineCtrl.Delete(ctx, session, repoRef, pipelineUID) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/pipeline/find.go b/internal/api/handler/pipeline/find.go index 74ee6c77d..f2b70948a 100644 --- a/internal/api/handler/pipeline/find.go +++ b/internal/api/handler/pipeline/find.go @@ -10,25 +10,24 @@ import ( "github.com/harness/gitness/internal/api/controller/pipeline" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleFind(pipelineCtrl *pipeline.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - pipeline, err := pipelineCtrl.Find(ctx, session, spaceRef, pipelineUID) + pipeline, err := pipelineCtrl.Find(ctx, session, repoRef, pipelineUID) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/pipeline/update.go 
b/internal/api/handler/pipeline/update.go index 631ea4a77..05ed6ed61 100644 --- a/internal/api/handler/pipeline/update.go +++ b/internal/api/handler/pipeline/update.go @@ -11,7 +11,6 @@ import ( "github.com/harness/gitness/internal/api/controller/pipeline" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleUpdate(pipelineCtrl *pipeline.Controller) http.HandlerFunc { @@ -26,18 +25,18 @@ func HandleUpdate(pipelineCtrl *pipeline.Controller) http.HandlerFunc { return } - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - pipeline, err := pipelineCtrl.Update(ctx, session, spaceRef, pipelineUID, in) + pipeline, err := pipelineCtrl.Update(ctx, session, repoRef, pipelineUID, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/space/list_pipelines.go b/internal/api/handler/repo/list_pipelines.go similarity index 69% rename from internal/api/handler/space/list_pipelines.go rename to internal/api/handler/repo/list_pipelines.go index 30aba9b4b..8faff73c4 100644 --- a/internal/api/handler/space/list_pipelines.go +++ b/internal/api/handler/repo/list_pipelines.go @@ -2,28 +2,29 @@ // Use of this source code is governed by the Polyform Free Trial License // that can be found in the LICENSE.md file for this repository. 
-package space +package repo import ( "net/http" - "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/controller/repo" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" ) -func HandleListPipelines(spaceCtrl *space.Controller) http.HandlerFunc { +func HandleListPipelines(repoCtrl *repo.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - spaceRef, err := request.GetSpaceRefFromPath(r) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } filter := request.ParseListQueryFilterFromRequest(r) - repos, totalCount, err := spaceCtrl.ListPipelines(ctx, session, spaceRef, filter) + latest := request.GetLatestFromPath(r) + repos, totalCount, err := repoCtrl.ListPipelines(ctx, session, repoRef, latest, filter) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/trigger/create.go b/internal/api/handler/trigger/create.go index 52d36c166..2caa51fbc 100644 --- a/internal/api/handler/trigger/create.go +++ b/internal/api/handler/trigger/create.go @@ -11,19 +11,18 @@ import ( "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleCreate(triggerCtrl *trigger.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ 
-36,7 +35,7 @@ func HandleCreate(triggerCtrl *trigger.Controller) http.HandlerFunc { return } - trigger, err := triggerCtrl.Create(ctx, session, spaceRef, pipelineUID, in) + trigger, err := triggerCtrl.Create(ctx, session, repoRef, pipelineUID, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/trigger/delete.go b/internal/api/handler/trigger/delete.go index 492624bbf..a83fba7bd 100644 --- a/internal/api/handler/trigger/delete.go +++ b/internal/api/handler/trigger/delete.go @@ -10,19 +10,18 @@ import ( "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleDelete(triggerCtrl *trigger.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -33,7 +32,7 @@ func HandleDelete(triggerCtrl *trigger.Controller) http.HandlerFunc { return } - err = triggerCtrl.Delete(ctx, session, spaceRef, pipelineUID, triggerUID) + err = triggerCtrl.Delete(ctx, session, repoRef, pipelineUID, triggerUID) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/trigger/find.go b/internal/api/handler/trigger/find.go index f60714a78..6d1ae53c4 100644 --- a/internal/api/handler/trigger/find.go +++ b/internal/api/handler/trigger/find.go @@ -10,14 +10,13 @@ import ( "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - 
"github.com/harness/gitness/internal/paths" ) func HandleFind(triggerCtrl *trigger.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -27,13 +26,13 @@ func HandleFind(triggerCtrl *trigger.Controller) http.HandlerFunc { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - trigger, err := triggerCtrl.Find(ctx, session, spaceRef, pipelineUID, triggerUID) + trigger, err := triggerCtrl.Find(ctx, session, repoRef, pipelineUID, triggerUID) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/trigger/list.go b/internal/api/handler/trigger/list.go index 615f83f25..268cb7037 100644 --- a/internal/api/handler/trigger/list.go +++ b/internal/api/handler/trigger/list.go @@ -10,19 +10,18 @@ import ( "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleList(triggerCtrl *trigger.Controller) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() session, _ := request.AuthSessionFrom(ctx) - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -30,7 +29,7 @@ func HandleList(triggerCtrl *trigger.Controller) http.HandlerFunc { filter := 
request.ParseListQueryFilterFromRequest(r) - repos, totalCount, err := triggerCtrl.List(ctx, session, spaceRef, pipelineUID, filter) + repos, totalCount, err := triggerCtrl.List(ctx, session, repoRef, pipelineUID, filter) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/handler/trigger/update.go b/internal/api/handler/trigger/update.go index 04d87c3d4..be63b6f63 100644 --- a/internal/api/handler/trigger/update.go +++ b/internal/api/handler/trigger/update.go @@ -11,7 +11,6 @@ import ( "github.com/harness/gitness/internal/api/controller/trigger" "github.com/harness/gitness/internal/api/render" "github.com/harness/gitness/internal/api/request" - "github.com/harness/gitness/internal/paths" ) func HandleUpdate(triggerCtrl *trigger.Controller) http.HandlerFunc { @@ -26,12 +25,12 @@ func HandleUpdate(triggerCtrl *trigger.Controller) http.HandlerFunc { return } - pipelineRef, err := request.GetPipelineRefFromPath(r) + pipelineUID, err := request.GetPipelineUIDFromPath(r) if err != nil { render.TranslatedUserError(w, err) return } - spaceRef, pipelineUID, err := paths.DisectLeaf(pipelineRef) + repoRef, err := request.GetRepoRefFromPath(r) if err != nil { render.TranslatedUserError(w, err) return @@ -42,7 +41,7 @@ func HandleUpdate(triggerCtrl *trigger.Controller) http.HandlerFunc { return } - pipeline, err := triggerCtrl.Update(ctx, session, spaceRef, pipelineUID, triggerUID, in) + pipeline, err := triggerCtrl.Update(ctx, session, repoRef, pipelineUID, triggerUID, in) if err != nil { render.TranslatedUserError(w, err) return diff --git a/internal/api/openapi/pipeline.go b/internal/api/openapi/pipeline.go index c618200f8..4b2d46489 100644 --- a/internal/api/openapi/pipeline.go +++ b/internal/api/openapi/pipeline.go @@ -7,9 +7,11 @@ package openapi import ( "net/http" + "github.com/gotidy/ptr" "github.com/harness/gitness/internal/api/controller/execution" "github.com/harness/gitness/internal/api/controller/pipeline" 
"github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/request" "github.com/harness/gitness/internal/api/usererror" "github.com/harness/gitness/types" @@ -17,7 +19,8 @@ import ( ) type pipelineRequest struct { - Ref string `path:"pipeline_ref"` + repoRequest + Ref string `path:"pipeline_uid"` } type executionRequest struct { @@ -47,6 +50,7 @@ type createTriggerRequest struct { } type createPipelineRequest struct { + repoRequest pipeline.CreateInput } @@ -77,6 +81,20 @@ type updatePipelineRequest struct { pipeline.UpdateInput } +var queryParameterLatest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamLatest, + In: openapi3.ParameterInQuery, + Description: ptr.String("Whether to fetch latest build information for each pipeline."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeBoolean), + }, + }, + }, +} + func pipelineOperations(reflector *openapi3.Reflector) { opCreate := openapi3.Operation{} opCreate.WithTags("pipeline") @@ -87,7 +105,19 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) - _ = reflector.Spec.AddOperation(http.MethodPost, "/pipelines", opCreate) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pipelines", opCreate) + + opPipelines := openapi3.Operation{} + opPipelines.WithTags("pipeline") + opPipelines.WithMapOfAnything(map[string]interface{}{"operationId": "listPipelines"}) + opPipelines.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit, queryParameterLatest) + _ = reflector.SetRequest(&opPipelines, new(repoRequest), http.MethodGet) + _ = 
reflector.SetJSONResponse(&opPipelines, []types.Pipeline{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pipelines", opPipelines) opFind := openapi3.Operation{} opFind.WithTags("pipeline") @@ -98,7 +128,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) - _ = reflector.Spec.AddOperation(http.MethodGet, "/pipelines/{pipeline_ref}", opFind) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pipelines/{pipeline_uid}", opFind) opDelete := openapi3.Operation{} opDelete.WithTags("pipeline") @@ -109,7 +139,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) - _ = reflector.Spec.AddOperation(http.MethodDelete, "/pipelines/{pipeline_ref}", opDelete) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/pipelines/{pipeline_uid}", opDelete) opUpdate := openapi3.Operation{} opUpdate.WithTags("pipeline") @@ -122,7 +152,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&opUpdate, 
new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodPatch, - "/pipelines/{pipeline_ref}", opUpdate) + "/repos/{repo_ref}/pipelines/{pipeline_uid}", opUpdate) executionCreate := openapi3.Operation{} executionCreate.WithTags("pipeline") @@ -134,7 +164,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusUnauthorized) _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusForbidden) _ = reflector.Spec.AddOperation(http.MethodPost, - "/pipelines/{pipeline_ref}/executions", executionCreate) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions", executionCreate) executionFind := openapi3.Operation{} executionFind.WithTags("pipeline") @@ -146,7 +176,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, - "/pipelines/{pipeline_ref}/executions/{execution_number}", executionFind) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionFind) executionDelete := openapi3.Operation{} executionDelete.WithTags("pipeline") @@ -158,7 +188,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodDelete, - "/pipelines/{pipeline_ref}/executions/{execution_number}", executionDelete) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionDelete) executionUpdate := openapi3.Operation{} executionUpdate.WithTags("pipeline") @@ -171,7 +201,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = 
reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&executionUpdate, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodPatch, - "/pipelines/{pipeline_ref}/executions/{execution_number}", executionUpdate) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionUpdate) executionList := openapi3.Operation{} executionList.WithTags("pipeline") @@ -184,7 +214,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, - "/pipelines/{pipeline_ref}/executions", executionList) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions", executionList) triggerCreate := openapi3.Operation{} triggerCreate.WithTags("pipeline") @@ -196,7 +226,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusUnauthorized) _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusForbidden) _ = reflector.Spec.AddOperation(http.MethodPost, - "/pipelines/{pipeline_ref}/triggers", triggerCreate) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers", triggerCreate) triggerFind := openapi3.Operation{} triggerFind.WithTags("pipeline") @@ -208,7 +238,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, - "/pipelines/{pipeline_ref}/triggers/{trigger_uid}", triggerFind) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerFind) triggerDelete := openapi3.Operation{} 
triggerDelete.WithTags("pipeline") @@ -220,7 +250,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodDelete, - "/pipelines/{pipeline_ref}/triggers/{trigger_uid}", triggerDelete) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerDelete) triggerUpdate := openapi3.Operation{} triggerUpdate.WithTags("pipeline") @@ -233,7 +263,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodPatch, - "/pipelines/{pipeline_ref}/triggers/{trigger_uid}", triggerUpdate) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerUpdate) triggerList := openapi3.Operation{} triggerList.WithTags("pipeline") @@ -246,7 +276,7 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, - "/pipelines/{pipeline_ref}/triggers", triggerList) + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers", triggerList) logView := openapi3.Operation{} logView.WithTags("pipeline") @@ -258,5 +288,5 @@ func pipelineOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusForbidden) _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, - "/pipelines/{pipeline_ref}/executions/{execution_number}/logs/{stage_number}/{step_number}", logView) + 
"/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/logs/{stage_number}/{step_number}", logView) } diff --git a/internal/api/openapi/plugin.go b/internal/api/openapi/plugin.go index 71432f548..fcca4bdbf 100644 --- a/internal/api/openapi/plugin.go +++ b/internal/api/openapi/plugin.go @@ -7,11 +7,11 @@ package openapi import ( "net/http" - "github.com/gotidy/ptr" "github.com/harness/gitness/internal/api/request" "github.com/harness/gitness/internal/api/usererror" "github.com/harness/gitness/types" + "github.com/gotidy/ptr" "github.com/swaggest/openapi-go/openapi3" ) diff --git a/internal/api/openapi/space.go b/internal/api/openapi/space.go index cf63a9980..1a2117938 100644 --- a/internal/api/openapi/space.go +++ b/internal/api/openapi/space.go @@ -230,18 +230,6 @@ func spaceOperations(reflector *openapi3.Reflector) { _ = reflector.SetJSONResponse(&opRepos, new(usererror.Error), http.StatusNotFound) _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/repos", opRepos) - opPipelines := openapi3.Operation{} - opPipelines.WithTags("space") - opPipelines.WithMapOfAnything(map[string]interface{}{"operationId": "listPipelines"}) - opPipelines.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) - _ = reflector.SetRequest(&opPipelines, new(spaceRequest), http.MethodGet) - _ = reflector.SetJSONResponse(&opPipelines, []types.Pipeline{}, http.StatusOK) - _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusInternalServerError) - _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusUnauthorized) - _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusForbidden) - _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusNotFound) - _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/pipelines", opPipelines) - opTemplates := openapi3.Operation{} opTemplates.WithTags("space") 
opTemplates.WithMapOfAnything(map[string]interface{}{"operationId": "listTemplates"}) diff --git a/internal/api/request/pipeline.go b/internal/api/request/pipeline.go index e7aa62189..1aec54ed4 100644 --- a/internal/api/request/pipeline.go +++ b/internal/api/request/pipeline.go @@ -10,14 +10,15 @@ import ( ) const ( - PathParamPipelineRef = "pipeline_ref" + PathParamPipelineRef = "pipeline_uid" PathParamExecutionNumber = "execution_number" PathParamStageNumber = "stage_number" PathParamStepNumber = "step_number" PathParamTriggerUID = "trigger_uid" + QueryParamLatest = "latest" ) -func GetPipelineRefFromPath(r *http.Request) (string, error) { +func GetPipelineUIDFromPath(r *http.Request) (string, error) { rawRef, err := PathParamOrError(r, PathParamPipelineRef) if err != nil { return "", err @@ -39,6 +40,14 @@ func GetStepNumberFromPath(r *http.Request) (int64, error) { return PathParamAsPositiveInt64(r, PathParamStepNumber) } +func GetLatestFromPath(r *http.Request) bool { + v, _ := QueryParam(r, QueryParamLatest) + if v == "true" { + return true + } + return false +} + func GetTriggerUIDFromPath(r *http.Request) (string, error) { rawRef, err := PathParamOrError(r, PathParamTriggerUID) if err != nil { diff --git a/internal/router/api.go b/internal/router/api.go index d71aaacfa..87f20dd62 100644 --- a/internal/router/api.go +++ b/internal/router/api.go @@ -71,7 +71,7 @@ type APIHandler interface { var ( // terminatedPathPrefixesAPI is the list of prefixes that will require resolving terminated paths. 
- terminatedPathPrefixesAPI = []string{"/v1/spaces/", "/v1/repos/", "/v1/pipelines/", + terminatedPathPrefixesAPI = []string{"/v1/spaces/", "/v1/repos/", "/v1/secrets/", "/v1/connectors", "/v1/templates"} ) @@ -162,8 +162,7 @@ func setupRoutesV1(r chi.Router, sysCtrl *system.Controller, ) { setupSpaces(r, spaceCtrl) - setupRepos(r, repoCtrl, pullreqCtrl, webhookCtrl, checkCtrl) - setupPipelines(r, pipelineCtrl, executionCtrl, triggerCtrl, logCtrl) + setupRepos(r, repoCtrl, pipelineCtrl, executionCtrl, triggerCtrl, logCtrl, pullreqCtrl, webhookCtrl, checkCtrl) setupConnectors(r, connectorCtrl) setupTemplates(r, templateCtrl) setupSecrets(r, secretCtrl) @@ -193,7 +192,6 @@ func setupSpaces(r chi.Router, spaceCtrl *space.Controller) { r.Get("/spaces", handlerspace.HandleListSpaces(spaceCtrl)) r.Get("/repos", handlerspace.HandleListRepos(spaceCtrl)) r.Get("/service-accounts", handlerspace.HandleListServiceAccounts(spaceCtrl)) - r.Get("/pipelines", handlerspace.HandleListPipelines(spaceCtrl)) r.Get("/secrets", handlerspace.HandleListSecrets(spaceCtrl)) r.Get("/connectors", handlerspace.HandleListConnectors(spaceCtrl)) r.Get("/templates", handlerspace.HandleListTemplates(spaceCtrl)) @@ -223,6 +221,10 @@ func setupSpaces(r chi.Router, spaceCtrl *space.Controller) { func setupRepos(r chi.Router, repoCtrl *repo.Controller, + pipelineCtrl *pipeline.Controller, + executionCtrl *execution.Controller, + triggerCtrl *trigger.Controller, + logCtrl *logs.Controller, pullreqCtrl *pullreq.Controller, webhookCtrl *webhook.Controller, checkCtrl *check.Controller, @@ -312,6 +314,8 @@ func setupRepos(r chi.Router, setupWebhook(r, webhookCtrl) + setupPipelines(r, repoCtrl, pipelineCtrl, executionCtrl, triggerCtrl, logCtrl) + SetupChecks(r, checkCtrl) }) }) @@ -319,11 +323,13 @@ func setupRepos(r chi.Router, func setupPipelines( r chi.Router, + repoCtrl *repo.Controller, pipelineCtrl *pipeline.Controller, executionCtrl *execution.Controller, triggerCtrl *trigger.Controller, logCtrl 
*logs.Controller) { r.Route("/pipelines", func(r chi.Router) { + r.Get("/", handlerrepo.HandleListPipelines(repoCtrl)) // Create takes path and parentId via body, not uri r.Post("/", handlerpipeline.HandleCreate(pipelineCtrl)) r.Route(fmt.Sprintf("/{%s}", request.PathParamPipelineRef), func(r chi.Router) { diff --git a/internal/services/job/executor.go b/internal/services/job/executor.go new file mode 100644 index 000000000..7f80917d3 --- /dev/null +++ b/internal/services/job/executor.go @@ -0,0 +1,142 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package job + +import ( + "context" + "errors" + "fmt" + "runtime/debug" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// Executor holds map of Handler objects per each job type registered. +// The Scheduler uses the Executor to start execution of jobs. +type Executor struct { + handlerMap map[string]Handler + handlerComplete bool + store store.JobStore + publisher pubsub.Publisher +} + +const ( + ProgressMin = 0 + ProgressMax = 100 +) + +// ProgressReporter can be used by a job Handler to report back the execution progress. +type ProgressReporter func(progress int, result string) error + +// Handler is a job executor for a specific job type. +// An implementation should try to honor the context and +// try to abort the execution as soon as the context is done. +type Handler interface { + Handle(ctx context.Context, input string, fn ProgressReporter) (result string, err error) +} + +var noHandlerDefinedError = errors.New("no handler registered for the job type") + +// NewExecutor creates new Executor. 
+func NewExecutor(jobStore store.JobStore, publisher pubsub.Publisher) *Executor { + return &Executor{ + handlerMap: make(map[string]Handler), + handlerComplete: false, + store: jobStore, + publisher: publisher, + } +} + +// Register registers a job Handler for the provided job type. +// This function is not thread safe. All calls are expected to be made +// in a single thread during the application boot time. +func (e *Executor) Register(jobType string, exec Handler) error { + if jobType == "" { + return errors.New("jobType must not be empty") + } + + if e.handlerComplete { + return errors.New("job handler registration is complete") + } + + if exec == nil { + return errors.New("provided Handler is nil") + } + + if _, ok := e.handlerMap[jobType]; ok { + return fmt.Errorf("a Handler is already defined to run the '%s' job types", jobType) + } + + e.handlerMap[jobType] = exec + + return nil +} + +// finishRegistration forbids further registration of job types. +// It is called by the Scheduler when it starts. +func (e *Executor) finishRegistration() { + e.handlerComplete = true +} + +// exec runs a single job. This function is synchronous, +// so the caller is responsible to run it in a separate go-routine. +func (e *Executor) exec( + ctx context.Context, + jobUID, jobType string, + input string, +) (result string, err error) { + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf( + "panic while processing job=%s type=%s: %v\n%s", + jobUID, jobType, r, debug.Stack()) + } + }() + + exec, ok := e.handlerMap[jobType] + if !ok { + return "", noHandlerDefinedError + } + + // progressReporter is the function with which the job can update its progress. + // This function will be executed in the job executor's Go-routine. + // It uses the job's context. 
+ progressReporter := func(progress int, result string) error { + if progress < ProgressMin || progress > ProgressMax { + return errors.New("progress must be between 0 and 100") + } + + jobDummy := &types.Job{ + UID: jobUID, + Updated: time.Now().UnixMilli(), + Result: result, + State: enum.JobStateRunning, + RunProgress: progress, + } + + // This doesn't need to be behind the global lock because it only updates the single row. + // While a job is running no other process should touch it. + // Even this call will fail if the context deadline has been exceeded. + // The job parameter is a dummy types.Job object that just holds fields that should be updated. + if err := e.store.UpdateProgress(ctx, jobDummy); err != nil { + return err + } + + // tell everybody that a job progress has been updated + if err := publishStateChange(ctx, e.publisher, jobDummy); err != nil { + log.Err(err).Msg("failed to publish job state change") + } + + return nil + } + + return exec.Handle(ctx, input, progressReporter) // runs the job +} diff --git a/internal/services/job/job_overdue.go b/internal/services/job/job_overdue.go new file mode 100644 index 000000000..dbd45a566 --- /dev/null +++ b/internal/services/job/job_overdue.go @@ -0,0 +1,89 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package job + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +const ( + jobUIDOverdue = "gitness:jobs:overdue" + jobTypeOverdue = "gitness:jobs:overdue" + jobCronOverdue = "*/20 * * * *" // every 20 min +) + +type jobOverdue struct { + store store.JobStore + mxManager lock.MutexManager + scheduler *Scheduler +} + +func newJobOverdue(jobStore store.JobStore, mxManager lock.MutexManager, scheduler *Scheduler) *jobOverdue { + return &jobOverdue{ + store: jobStore, + mxManager: mxManager, + scheduler: scheduler, + } +} + +// Handle reclaims overdue jobs. Normally this shouldn't happen. +// But, it can occur if DB update after a job execution fails, +// or the server suddenly terminates while the job is still running. +func (j *jobOverdue) Handle(ctx context.Context, _ string, _ ProgressReporter) (string, error) { + mx, err := globalLock(ctx, j.mxManager) + if err != nil { + return "", fmt.Errorf("failed to obtain the lock to reclaim overdue jobs") + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Err(err).Msg("failed to release global lock after reclaiming overdue jobs") + } + }() + + overdueJobs, err := j.store.ListDeadlineExceeded(ctx, time.Now()) + if err != nil { + return "", fmt.Errorf("failed to list overdue jobs") + } + + if len(overdueJobs) == 0 { + return "", nil + } + + var minScheduled time.Time + + for _, job := range overdueJobs { + const errorMessage = "deadline exceeded" + postExec(job, "", errorMessage) + + err = j.store.UpdateExecution(ctx, job) + if err != nil { + return "", fmt.Errorf("failed update overdue job") + } + + if job.State == enum.JobStateScheduled { + scheduled := time.UnixMilli(job.Scheduled) + if minScheduled.IsZero() || minScheduled.After(scheduled) { + minScheduled = scheduled + } + } + } + + if !minScheduled.IsZero() { + 
j.scheduler.scheduleProcessing(minScheduled) + } + + result := fmt.Sprintf("found %d overdue jobs", len(overdueJobs)) + + return result, nil +} diff --git a/internal/services/job/job_purge.go b/internal/services/job/job_purge.go new file mode 100644 index 000000000..0fe757a4a --- /dev/null +++ b/internal/services/job/job_purge.go @@ -0,0 +1,67 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package job + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + + "github.com/rs/zerolog/log" +) + +const ( + jobUIDPurge = "gitness:jobs:purge" + jobTypePurge = "gitness:jobs:purge" + jobCronPurge = "15 */4 * * *" // every 4 hours at 15 minutes +) + +type jobPurge struct { + store store.JobStore + mxManager lock.MutexManager + minOldAge time.Duration +} + +func newJobPurge(jobStore store.JobStore, mxManager lock.MutexManager, minOldAge time.Duration) *jobPurge { + if minOldAge < 0 { + minOldAge = 0 + } + + return &jobPurge{ + store: jobStore, + mxManager: mxManager, + minOldAge: minOldAge, + } +} + +func (j *jobPurge) Handle(ctx context.Context, _ string, _ ProgressReporter) (string, error) { + mx, err := globalLock(ctx, j.mxManager) + if err != nil { + return "", fmt.Errorf("failed to obtain the lock to clean up old jobs") + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Err(err).Msg("failed to release global lock after cleaning up old jobs") + } + }() + + olderThan := time.Now().Add(-j.minOldAge) + + n, err := j.store.DeleteOld(ctx, olderThan) + if err != nil { + return "", fmt.Errorf("failed to purge old jobs") + } + + result := "no old jobs found" + if n > 0 { + result = fmt.Sprintf("deleted %d old jobs", n) + } + + return result, nil +} diff --git a/internal/services/job/lock.go b/internal/services/job/lock.go new file mode 
100644 index 000000000..81210e1fa --- /dev/null +++ b/internal/services/job/lock.go @@ -0,0 +1,23 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package job + +import ( + "context" + + "github.com/harness/gitness/lock" +) + +func globalLock(ctx context.Context, manager lock.MutexManager) (lock.Mutex, error) { + const lockKey = "jobs" + mx, err := manager.NewMutex(lockKey) + if err != nil { + return nil, err + } + + err = mx.Lock(ctx) + + return mx, err +} diff --git a/internal/services/job/pubsub.go b/internal/services/job/pubsub.go new file mode 100644 index 000000000..8f13b44da --- /dev/null +++ b/internal/services/job/pubsub.go @@ -0,0 +1,60 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package job + +import ( + "bytes" + "context" + "encoding/gob" + "fmt" + + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" +) + +const ( + PubSubTopicCancelJob = "gitness:job:cancel_job" + PubSubTopicStateChange = "gitness:job:state_change" +) + +func encodeStateChange(job *types.Job) ([]byte, error) { + stateChange := &types.JobStateChange{ + UID: job.UID, + State: job.State, + Progress: job.RunProgress, + Result: job.Result, + Failure: job.LastFailureError, + } + + buffer := bytes.NewBuffer(nil) + if err := gob.NewEncoder(buffer).Encode(stateChange); err != nil { + return nil, err + } + + return buffer.Bytes(), nil +} + +func DecodeStateChange(payload []byte) (*types.JobStateChange, error) { + stateChange := &types.JobStateChange{} + if err := gob.NewDecoder(bytes.NewReader(payload)).Decode(stateChange); err != nil { + return nil, err + } + + return stateChange, nil +} + +func publishStateChange(ctx context.Context, publisher pubsub.Publisher, job *types.Job) error { + payload, err := encodeStateChange(job) + if err != nil { + return fmt.Errorf("failed to gob encode JobStateChange: %w", err) + } + + err = publisher.Publish(ctx, PubSubTopicStateChange, payload) + if err != nil { + return fmt.Errorf("failed to publish JobStateChange: %w", err) + } + + return nil +} diff --git a/internal/services/job/scheduler.go b/internal/services/job/scheduler.go new file mode 100644 index 000000000..4c251b18e --- /dev/null +++ b/internal/services/job/scheduler.go @@ -0,0 +1,688 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package job + +import ( + "context" + "errors" + "fmt" + "runtime/debug" + "sync" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gorhill/cronexpr" + "github.com/rs/zerolog/log" +) + +// Scheduler controls execution of background jobs. +type Scheduler struct { + // dependencies + store store.JobStore + executor *Executor + mxManager lock.MutexManager + pubsubService pubsub.PubSub + + // configuration fields + instanceID string + maxRunning int + purgeMinOldAge time.Duration + + // synchronization stuff + signal chan time.Time + done chan struct{} + wgRunning sync.WaitGroup + cancelJobMx sync.Mutex + cancelJobMap map[string]context.CancelFunc +} + +func NewScheduler( + jobStore store.JobStore, + executor *Executor, + mxManager lock.MutexManager, + pubsubService pubsub.PubSub, + instanceID string, + maxRunning int, + purgeMinOldAge time.Duration, +) (*Scheduler, error) { + if maxRunning < 1 { + maxRunning = 1 + } + return &Scheduler{ + store: jobStore, + executor: executor, + mxManager: mxManager, + pubsubService: pubsubService, + + instanceID: instanceID, + maxRunning: maxRunning, + purgeMinOldAge: purgeMinOldAge, + + cancelJobMap: map[string]context.CancelFunc{}, + }, nil +} + +// Run runs the background job scheduler. +// It's a blocking call. It blocks until the provided context is done. +func (s *Scheduler) Run(ctx context.Context) error { + if s.done != nil { + return errors.New("already started") + } + + consumer := s.pubsubService.Subscribe(ctx, PubSubTopicCancelJob, s.handleCancelJob) + defer func() { + err := consumer.Close() + if err != nil { + log.Ctx(ctx).Err(err). 
+ Msg("job scheduler: failed to close pubsub cancel job consumer") + } + }() + + if err := s.createNecessaryJobs(ctx); err != nil { + return fmt.Errorf("failed to create necessary jobs: %w", err) + } + + if err := s.registerNecessaryJobs(); err != nil { + return fmt.Errorf("failed to register scheduler's internal jobs: %w", err) + } + + s.executor.finishRegistration() + + log.Ctx(ctx).Debug().Msg("job scheduler: starting") + + s.done = make(chan struct{}) + defer close(s.done) + + s.signal = make(chan time.Time, 1) + + timer := newSchedulerTimer() + defer timer.Stop() + + for { + err := func() error { + defer func() { + if r := recover(); r != nil { + stack := string(debug.Stack()) + log.Ctx(ctx).Error(). + Str("panic", fmt.Sprintf("[%T] job scheduler panic: %v", r, r)). + Msg(stack) + } + }() + + select { + case <-ctx.Done(): + return ctx.Err() + + case newTime := <-s.signal: + dur := timer.RescheduleEarlier(newTime) + if dur > 0 { + log.Ctx(ctx).Trace(). + Msgf("job scheduler: update of scheduled job processing time... runs in %s", dur) + } + return nil + + case now := <-timer.Ch(): + count, nextExec, gotAllJobs, err := s.processReadyJobs(ctx, now) + + // If the next processing time isn't known use the default. + if nextExec.IsZero() { + const period = time.Minute + nextExec = now.Add(period) + } + + // Reset the timer. Make the timer edgy if there are more jobs available. + dur := timer.ResetAt(nextExec, !gotAllJobs) + + if err != nil { + log.Ctx(ctx).Err(err). + Msgf("job scheduler: failed to process jobs; next iteration in %s", dur) + } else { + log.Ctx(ctx).Trace(). + Msgf("job scheduler: started %d jobs; next iteration in %s", count, dur) + } + + return nil + } + }() + if err != nil { + return err + } + } +} + +// WaitJobsDone waits until execution of all jobs has finished. +// It is intended to be used for graceful shutdown, after the Run method has finished. 
+func (s *Scheduler) WaitJobsDone(ctx context.Context) {
+	log.Ctx(ctx).Debug().Msg("job scheduler: stopping... waiting for the currently running jobs to finish")
+
+	ch := make(chan struct{})
+	go func() {
+		s.wgRunning.Wait()
+		close(ch)
+	}()
+
+	select {
+	case <-ctx.Done():
+		log.Ctx(ctx).Warn().Msg("job scheduler: stop interrupted")
+	case <-ch:
+		log.Ctx(ctx).Info().Msg("job scheduler: gracefully stopped")
+	}
+}
+
+func (s *Scheduler) CancelJob(ctx context.Context, jobUID string) error {
+	s.cancelJobMx.Lock()
+	cancelFn, ok := s.cancelJobMap[jobUID]
+	s.cancelJobMx.Unlock()
+
+	if ok {
+		cancelFn()
+		return nil
+	}
+
+	return s.pubsubService.Publish(ctx, PubSubTopicCancelJob, []byte(jobUID))
+}
+
+func (s *Scheduler) handleCancelJob(payload []byte) error {
+	jobUID := string(payload)
+	if jobUID == "" {
+		return nil
+	}
+
+	s.cancelJobMx.Lock()
+	cancelFn, ok := s.cancelJobMap[jobUID]
+	s.cancelJobMx.Unlock()
+
+	if ok {
+		cancelFn()
+	}
+
+	return nil
+}
+
+// scheduleProcessing triggers processing of ready jobs.
+// This should be run after adding new jobs to the database.
+func (s *Scheduler) scheduleProcessing(scheduled time.Time) {
+	go func() {
+		select {
+		case <-s.done:
+		case s.signal <- scheduled:
+		}
+	}()
+}
+
+// scheduleIfHaveMoreJobs triggers processing of ready jobs if the timer is edgy.
+// The timer would be edgy if the previous iteration found more jobs than it could start (full capacity).
+// This should be run after a non-recurring job has finished.
+func (s *Scheduler) scheduleIfHaveMoreJobs() {
+	s.scheduleProcessing(time.Time{}) // zero time will trigger the timer if it's edgy
+}
+
+// RunJob runs a single job of the provided type.
+// All parameters a job receives must be inside the data string (as JSON or whatever the job handler can interpret).
+// The caller gets the job UID which can then be used to track execution (the job state and progress percentage).
+func (s *Scheduler) RunJob(ctx context.Context,
+	jobType string,
+	maxRetries int,
+	timeout time.Duration,
+	data string,
+) (string, error) {
+	if jobType == "" {
+		return "", errors.New("jobType must not be empty")
+	}
+
+	if maxRetries < 0 {
+		return "", errors.New("maxRetries must not be negative")
+	}
+
+	if timeout < time.Second {
+		return "", errors.New("timeout too short")
+	}
+
+	uid, err := UID()
+	if err != nil {
+		return "", fmt.Errorf("failed to generate job UID: %w", err)
+	}
+
+	nowMilli := time.Now().UnixMilli()
+
+	job := &types.Job{
+		UID:                 uid,
+		Created:             nowMilli,
+		Updated:             nowMilli,
+		Type:                jobType,
+		Priority:            enum.JobPriorityNormal,
+		Data:                data,
+		Result:              "",
+		MaxDurationSeconds:  int(timeout / time.Second),
+		MaxRetries:          maxRetries,
+		State:               enum.JobStateScheduled,
+		Scheduled:           nowMilli,
+		TotalExecutions:     0,
+		RunBy:               "",
+		RunDeadline:         nowMilli,
+		RunProgress:         ProgressMin,
+		LastExecuted:        0, // never executed
+		IsRecurring:         false,
+		RecurringCron:       "",
+		ConsecutiveFailures: 0,
+		LastFailureError:    "",
+	}
+
+	ctx = log.Ctx(ctx).With().
+		Str("job.UID", job.UID).
+		Str("job.Type", job.Type).
+		Logger().WithContext(ctx)
+
+	mx, err := globalLock(ctx, s.mxManager)
+	if err != nil {
+		return "", fmt.Errorf("failed to obtain global lock to add new job: %w", err)
+	}
+
+	defer func() {
+		if err := mx.Unlock(ctx); err != nil {
+			log.Ctx(ctx).Err(err).Msg("failed to release global lock after adding a new job")
+		}
+	}()
+
+	err = s.store.Create(ctx, job)
+	if err != nil {
+		return "", fmt.Errorf("failed to add new job to the database: %w", err)
+	}
+
+	available, err := s.availableSlots(ctx)
+	if err != nil {
+		return "", fmt.Errorf("failed to count available slots for job execution: %w", err)
+	}
+
+	if available == 0 {
+		// already running at the full capacity: the job is scheduled, we're done.
+		return uid, nil
+	}
+
+	err = s.runJob(ctx, job)
+	if err != nil {
+		log.Ctx(ctx).Err(err).Msg("failed to run job")
+	}
+
+	return uid, nil
+}
+
+// processReadyJobs executes jobs that are ready to run. This function is periodically run by the Scheduler.
+// The function returns the number of jobs it has started, the next scheduled execution time (of this function)
+// and a bool value if all currently available ready jobs were started.
+// Internally the Scheduler uses an "edgy" timer to reschedule calls of this function.
+// The edgy option of the timer will be on if this function hasn't been able to start all jobs that are ready to run.
+// If the timer has the edgy option turned on it will trigger the timer (and thus this function will be called)
+// when any currently running job finishes successfully or fails.
+func (s *Scheduler) processReadyJobs(ctx context.Context, now time.Time) (int, time.Time, bool, error) {
+	mx, err := globalLock(ctx, s.mxManager)
+	if err != nil {
+		return 0, time.Time{}, false,
+			fmt.Errorf("failed to obtain global lock to periodically process ready jobs: %w", err)
+	}
+
+	defer func() {
+		if err := mx.Unlock(ctx); err != nil {
+			log.Ctx(ctx).Err(err).
+				Msg("failed to release global lock after periodic processing of ready jobs")
+		}
+	}()
+
+	availableCount, err := s.availableSlots(ctx)
+	if err != nil {
+		return 0, time.Time{}, false,
+			fmt.Errorf("failed to count available slots for job execution: %w", err)
+	}
+
+	// get one over the limit to check if all ready jobs are fetched
+	jobs, err := s.store.ListReady(ctx, now, availableCount+1)
+	if err != nil {
+		return 0, time.Time{}, false,
+			fmt.Errorf("failed to load scheduled jobs: %w", err)
+	}
+
+	var (
+		countExecuted     int
+		knownNextExecTime time.Time
+		gotAllJobs        bool
+	)
+
+	if len(jobs) > availableCount {
+		// More jobs are ready than we are able to run.
+ jobs = jobs[:availableCount] + } else { + gotAllJobs = true + knownNextExecTime, err = s.store.NextScheduledTime(ctx, now) + if err != nil { + return 0, time.Time{}, false, + fmt.Errorf("failed to read next scheduled time: %w", err) + } + } + + for _, job := range jobs { + jobCtx := log.Ctx(ctx).With(). + Str("job.UID", job.UID). + Str("job.Type", job.Type). + Logger().WithContext(ctx) + + err = s.runJob(jobCtx, job) + if err != nil { + knownNextExecTime = time.Time{} + gotAllJobs = false + log.Ctx(jobCtx).Err(err).Msg("failed to run job") + continue + } + + countExecuted++ + } + + return countExecuted, knownNextExecTime, gotAllJobs, nil +} + +func (s *Scheduler) availableSlots(ctx context.Context) (int, error) { + countRunning, err := s.store.CountRunning(ctx) + if err != nil { + return 0, err + } + + availableCount := s.maxRunning - countRunning + if availableCount < 0 { + return 0, nil + } + + return availableCount, nil +} + +// runJob updates the job in the database and starts it in a separate goroutine. +// The function will also log the execution. +func (s *Scheduler) runJob(ctx context.Context, job *types.Job) error { + // Update the job fields for the new execution + s.preExec(job) + + if err := s.store.UpdateExecution(ctx, job); err != nil { + return err + } + + // tell everybody that a job has started + if err := publishStateChange(ctx, s.pubsubService, job); err != nil { + log.Err(err).Msg("failed to publish job state change") + } + + s.wgRunning.Add(1) + go func(jobCtx context.Context, job *types.Job) { + defer s.wgRunning.Done() + + log.Ctx(jobCtx).Debug().Msg("started job") + + timeStart := time.Now() + + // Run the job + execResult, execFailure := s.doExec(jobCtx, job) + + // Update the job fields, reschedule if necessary. + postExec(job, execResult, execFailure) + + // Use the context.Background() because we want to update the job even if the job's context is done. 
+ // The context can be done because the job exceeded its deadline or the server is shutting down. + backgroundCtx := context.Background() + + // tell everybody that a job has finished execution + if err := publishStateChange(backgroundCtx, s.pubsubService, job); err != nil { + log.Ctx(jobCtx).Err(err).Msg("failed to publish job state change") + } + + if mx, err := globalLock(backgroundCtx, s.mxManager); err != nil { + // If locking failed, just log the error and proceed to update the DB anyway. + log.Ctx(jobCtx).Err(err).Msg("failed to obtain global lock to update job after execution") + } else { + defer func() { + if err := mx.Unlock(backgroundCtx); err != nil { + log.Ctx(jobCtx).Err(err).Msg("failed to release global lock to update job after execution") + } + }() + } + + if err := s.store.UpdateExecution(backgroundCtx, job); err != nil { + log.Ctx(jobCtx).Err(err).Msg("failed to update after execution") + return + } + + logInfo := log.Ctx(jobCtx).Info().Str("duration", time.Since(timeStart).String()) + + if job.IsRecurring { + logInfo = logInfo.Bool("job.IsRecurring", true) + } + if job.Result != "" { + logInfo = logInfo.Str("job.Result", job.Result) + } + if job.LastFailureError != "" { + logInfo = logInfo.Str("job.Failure", job.LastFailureError) + } + + switch job.State { + case enum.JobStateFinished: + logInfo.Msg("job successfully finished") + s.scheduleIfHaveMoreJobs() + + case enum.JobStateFailed: + logInfo.Msg("job failed") + s.scheduleIfHaveMoreJobs() + + case enum.JobStateScheduled: + scheduledTime := time.UnixMilli(job.Scheduled) + logInfo. + Str("job.Scheduled", scheduledTime.Format(time.RFC3339Nano)). + Msg("job finished and rescheduled") + + s.scheduleProcessing(scheduledTime) + + case enum.JobStateRunning: + log.Ctx(jobCtx).Error().Msg("should not happen; job still has state=running after finishing") + } + }(ctx, job) + + return nil +} + +// preExec updates the provided types.Job before execution. 
+func (s *Scheduler) preExec(job *types.Job) { + if job.MaxDurationSeconds < 1 { + job.MaxDurationSeconds = 1 + } + + now := time.Now() + nowMilli := now.UnixMilli() + + execDuration := time.Duration(job.MaxDurationSeconds) * time.Second + execDeadline := now.Add(execDuration) + + job.Updated = nowMilli + job.LastExecuted = nowMilli + job.State = enum.JobStateRunning + job.RunDeadline = execDeadline.UnixMilli() + job.RunBy = s.instanceID + job.RunProgress = ProgressMin + job.TotalExecutions++ + job.Result = "" + job.LastFailureError = "" +} + +// doExec executes the provided types.Job. +func (s *Scheduler) doExec(ctx context.Context, job *types.Job) (execResult, execError string) { + execDeadline := time.UnixMilli(job.RunDeadline) + + jobCtx, done := context.WithDeadline(ctx, execDeadline) + defer done() + + s.cancelJobMx.Lock() + if _, ok := s.cancelJobMap[job.UID]; ok { + // should not happen: jobs have unique UIDs! + s.cancelJobMx.Unlock() + return "", "failed to start: already running" + } + s.cancelJobMap[job.UID] = done + s.cancelJobMx.Unlock() + + defer func() { + s.cancelJobMx.Lock() + delete(s.cancelJobMap, job.UID) + s.cancelJobMx.Unlock() + }() + + execResult, err := s.executor.exec(jobCtx, job.UID, job.Type, job.Data) + if err != nil { + execError = err.Error() + } + + return +} + +// postExec updates the provided types.Job after execution and reschedules it if necessary. 
+func postExec(job *types.Job, resultData, resultErr string) { + now := time.Now() + nowMilli := now.UnixMilli() + + job.Updated = nowMilli + job.Result = resultData + job.RunBy = "" + + if resultErr != "" { + job.ConsecutiveFailures++ + job.State = enum.JobStateFailed + job.LastFailureError = resultErr + } else { + job.State = enum.JobStateFinished + job.RunProgress = ProgressMax + } + + // Reschedule recurring jobs + if job.IsRecurring { + if resultErr == "" { + job.ConsecutiveFailures = 0 + } + + exp, err := cronexpr.Parse(job.RecurringCron) + if err != nil { + job.State = enum.JobStateFailed + + messages := fmt.Sprintf("failed to parse cron string: %s", err.Error()) + if job.LastFailureError != "" { + messages = messages + "; " + job.LastFailureError + } + + job.LastFailureError = messages + } else { + job.State = enum.JobStateScheduled + job.Scheduled = exp.Next(now).UnixMilli() + } + + return + } + + // Reschedule the failed job if retrying is allowed + if job.State == enum.JobStateFailed && job.ConsecutiveFailures <= job.MaxRetries { + const retryDelay = 15 * time.Second + job.State = enum.JobStateScheduled + job.Scheduled = now.Add(retryDelay).UnixMilli() + job.RunProgress = ProgressMin + } +} + +func (s *Scheduler) AddRecurring( + ctx context.Context, + jobUID, + jobType, + cronDef string, + maxDur time.Duration, +) error { + cronExp, err := cronexpr.Parse(cronDef) + if err != nil { + return fmt.Errorf("invalid cron definition string for job type=%s: %w", jobType, err) + } + + now := time.Now() + nowMilli := now.UnixMilli() + + nextExec := cronExp.Next(now) + + job := &types.Job{ + UID: jobUID, + Created: nowMilli, + Updated: nowMilli, + Type: jobType, + Priority: enum.JobPriorityElevated, + Data: "", + Result: "", + MaxDurationSeconds: int(maxDur / time.Second), + MaxRetries: 0, + State: enum.JobStateScheduled, + Scheduled: nextExec.UnixMilli(), + TotalExecutions: 0, + RunBy: "", + RunDeadline: 0, + RunProgress: 0, + LastExecuted: 0, + IsRecurring: true, 
+ RecurringCron: cronDef, + ConsecutiveFailures: 0, + LastFailureError: "", + } + + err = s.store.Upsert(ctx, job) + if err != nil { + return fmt.Errorf("failed to upsert job id=%s type=%s: %w", jobUID, jobType, err) + } + + return nil +} + +func (s *Scheduler) createNecessaryJobs(ctx context.Context) error { + mx, err := globalLock(ctx, s.mxManager) + if err != nil { + return fmt.Errorf("failed to obtain global lock to create necessary jobs: %w", err) + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Ctx(ctx).Err(err). + Msg("failed to release global lock after creating necessary jobs") + } + }() + + err = s.AddRecurring(ctx, jobUIDPurge, jobTypePurge, jobCronPurge, 5*time.Second) + if err != nil { + return err + } + + err = s.AddRecurring(ctx, jobUIDOverdue, jobTypeOverdue, jobCronOverdue, 5*time.Second) + if err != nil { + return err + } + + return nil +} + +// registerNecessaryJobs registers two jobs: overdue job recovery and purge old finished jobs. +// These two jobs types are integral part of the job scheduler. +func (s *Scheduler) registerNecessaryJobs() error { + handlerOverdue := newJobOverdue(s.store, s.mxManager, s) + err := s.executor.Register(jobTypeOverdue, handlerOverdue) + if err != nil { + return err + } + + handlerPurge := newJobPurge(s.store, s.mxManager, s.purgeMinOldAge) + err = s.executor.Register(jobTypePurge, handlerPurge) + if err != nil { + return err + } + + return nil +} diff --git a/internal/services/job/timer.go b/internal/services/job/timer.go new file mode 100644 index 000000000..dfc93f70a --- /dev/null +++ b/internal/services/job/timer.go @@ -0,0 +1,111 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+
+package job
+
+import (
+	"time"
+)
+
+const timerMaxDur = 30 * time.Minute
+const timerMinDur = time.Nanosecond
+
+type schedulerTimer struct {
+	timerAt time.Time
+	timer   *time.Timer
+	edgy    bool // if true, the next RescheduleEarlier call will trigger the timer immediately.
+}
+
+// newSchedulerTimer creates a new timer for the Scheduler. It is created to fire immediately.
+func newSchedulerTimer() *schedulerTimer {
+	return &schedulerTimer{
+		timerAt: time.Now().Add(timerMinDur),
+		timer:   time.NewTimer(timerMinDur),
+	}
+}
+
+// ResetAt resets the internal timer to trigger at the provided time, clamped between
+// timerMinDur and timerMaxDur from now (a zero or past time fires almost immediately).
+func (t *schedulerTimer) ResetAt(next time.Time, edgy bool) time.Duration {
+	return t.resetAt(time.Now(), next, edgy)
+}
+
+func (t *schedulerTimer) resetAt(now, next time.Time, edgy bool) time.Duration {
+	var dur time.Duration
+
+	dur = next.Sub(now)
+	if dur < timerMinDur {
+		dur = timerMinDur
+		next = now.Add(dur)
+	} else if dur > timerMaxDur {
+		dur = timerMaxDur
+		next = now.Add(dur)
+	}
+
+	t.Stop()
+	t.edgy = edgy
+	t.timerAt = next
+	t.timer.Reset(dur)
+
+	return dur
+}
+
+// RescheduleEarlier will reset the timer if the new time is earlier than the previous time.
+// Otherwise, the function does nothing and returns 0.
+// Providing zero time triggers the timer if it's edgy, otherwise does nothing.
+func (t *schedulerTimer) RescheduleEarlier(next time.Time) time.Duration { + return t.rescheduleEarlier(time.Now(), next) +} + +func (t *schedulerTimer) rescheduleEarlier(now, next time.Time) time.Duration { + var dur time.Duration + + switch { + case t.edgy: + // if the timer is edgy trigger it immediately + dur = timerMinDur + + case next.IsZero(): + // if the provided time is zero: trigger the timer if it's edgy otherwise do nothing + if !t.edgy { + return 0 + } + dur = timerMinDur + + case !next.Before(t.timerAt): + // do nothing if the timer is already scheduled to run sooner than the provided time + return 0 + + default: + dur = next.Sub(now) + if dur < timerMinDur { + dur = timerMinDur + } + } + + next = now.Add(dur) + + t.Stop() + t.timerAt = next + t.timer.Reset(dur) + + return dur +} + +func (t *schedulerTimer) Ch() <-chan time.Time { + return t.timer.C +} + +func (t *schedulerTimer) Stop() { + // stop the timer + t.timer.Stop() + + // consume the timer's tick if any + select { + case <-t.timer.C: + default: + } + + t.timerAt = time.Time{} +} diff --git a/internal/services/job/timer_test.go b/internal/services/job/timer_test.go new file mode 100644 index 000000000..fe38b0f7c --- /dev/null +++ b/internal/services/job/timer_test.go @@ -0,0 +1,105 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+
+package job
+
+import (
+	"testing"
+	"time"
+)
+
+func TestSchedulerTimer_ResetAt(t *testing.T) {
+	now := time.Now()
+	tests := []struct {
+		name string
+		at   time.Time
+		exp  time.Duration
+	}{
+		{
+			name: "zero",
+			at:   time.Time{},
+			exp:  timerMinDur,
+		},
+		{
+			name: "immediate",
+			at:   now,
+			exp:  timerMinDur,
+		},
+		{
+			name: "30s",
+			at:   now.Add(30 * time.Second),
+			exp:  30 * time.Second,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			timer := newSchedulerTimer()
+			dur := timer.resetAt(now, test.at, false)
+			if want, got := test.exp, dur; want != got {
+				t.Errorf("want: %s, got: %s", want.String(), got.String())
+			}
+		})
+	}
+}
+
+func TestSchedulerTimer_RescheduleEarlier(t *testing.T) {
+	now := time.Now()
+	tests := []struct {
+		name string
+		at   time.Time
+		edgy bool
+		exp  time.Duration
+	}{
+		{
+			name: "past",
+			at:   now.Add(-time.Second),
+			exp:  timerMinDur,
+		},
+		{
+			name: "30s",
+			at:   now.Add(30 * time.Second),
+			exp:  30 * time.Second,
+		},
+		{
+			name: "90s",
+			at:   now.Add(90 * time.Second),
+			exp:  0,
+		},
+		{
+			name: "30s-edgy",
+			at:   now.Add(30 * time.Second),
+			edgy: true,
+			exp:  timerMinDur,
+		},
+		{
+			name: "90s-edgy",
+			at:   now.Add(90 * time.Second),
+			edgy: true,
+			exp:  timerMinDur,
+		},
+		{
+			name: "zero",
+			at:   time.Time{},
+			exp:  0,
+		},
+		{
+			name: "zero-edgy",
+			at:   time.Time{},
+			edgy: true,
+			exp:  timerMinDur,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			timer := newSchedulerTimer()
+			timer.resetAt(now, now.Add(time.Minute), test.edgy)
+			dur := timer.rescheduleEarlier(now, test.at)
+			if want, got := test.exp, dur; want != got {
+				t.Errorf("want: %s, got: %s", want.String(), got.String())
+			}
+		})
+	}
+}
diff --git a/internal/services/job/uid.go b/internal/services/job/uid.go
new file mode 100644
index 000000000..ac23c34a1
--- /dev/null
+++ b/internal/services/job/uid.go
@@ -0,0 +1,25 @@
+// Copyright 2022 Harness Inc. All rights reserved.
+// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package job + +import ( + "crypto/rand" + "encoding/base32" +) + +// UID returns unique random string with length equal to 16. +func UID() (string, error) { + const uidSizeBytes = 10 // must be divisible by 5, the resulting string length will be uidSizeBytes/5*8 + + var buf [uidSizeBytes]byte + _, err := rand.Read(buf[:]) + if err != nil { + return "", err + } + + uid := base32.StdEncoding.EncodeToString(buf[:]) + + return uid, nil +} diff --git a/internal/services/job/wire.go b/internal/services/job/wire.go new file mode 100644 index 000000000..0a5c49a5b --- /dev/null +++ b/internal/services/job/wire.go @@ -0,0 +1,47 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package job + +import ( + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideExecutor, + ProvideScheduler, +) + +func ProvideExecutor( + jobStore store.JobStore, + pubsubService pubsub.PubSub, +) *Executor { + return NewExecutor( + jobStore, + pubsubService, + ) +} + +func ProvideScheduler( + jobStore store.JobStore, + executor *Executor, + mutexManager lock.MutexManager, + pubsubService pubsub.PubSub, + config *types.Config, +) (*Scheduler, error) { + return NewScheduler( + jobStore, + executor, + mutexManager, + pubsubService, + config.InstanceID, + config.BackgroundJobs.MaxRunning, + config.BackgroundJobs.PurgeFinishedOlderThan, + ) +} diff --git a/internal/services/services.go b/internal/services/services.go index 70985dae3..1cb5efedd 100644 --- a/internal/services/services.go +++ b/internal/services/services.go @@ -5,6 +5,7 @@ package 
services import ( + "github.com/harness/gitness/internal/services/job" "github.com/harness/gitness/internal/services/pullreq" "github.com/harness/gitness/internal/services/webhook" @@ -16,16 +17,22 @@ var WireSet = wire.NewSet( ) type Services struct { - ws *webhook.Service - bms *pullreq.Service + Webhook *webhook.Service + PullReq *pullreq.Service + JobExecutor *job.Executor + JobScheduler *job.Scheduler } func ProvideServices( - ws *webhook.Service, - bms *pullreq.Service, + webhooksSrv *webhook.Service, + pullReqSrv *pullreq.Service, + jobExecutor *job.Executor, + jobScheduler *job.Scheduler, ) Services { return Services{ - ws: ws, - bms: bms, + Webhook: webhooksSrv, + PullReq: pullReqSrv, + JobExecutor: jobExecutor, + JobScheduler: jobScheduler, } } diff --git a/internal/store/database.go b/internal/store/database.go index 39549c4ed..d0db0f65f 100644 --- a/internal/store/database.go +++ b/internal/store/database.go @@ -441,6 +441,42 @@ type ( Delete(ctx context.Context, repoID, reqCheckID int64) error } + JobStore interface { + // Find fetches a job by its unique identifier. + Find(ctx context.Context, uid string) (*types.Job, error) + + // Create is used to create a new job. + Create(ctx context.Context, job *types.Job) error + + // Upsert will insert the job in the database if the job didn't already exist, + // or it will update the existing one but only if its definition has changed. + Upsert(ctx context.Context, job *types.Job) error + + // UpdateDefinition is used to update a job definition. + UpdateDefinition(ctx context.Context, job *types.Job) error + + // UpdateExecution is used to update a job before and after execution. + UpdateExecution(ctx context.Context, job *types.Job) error + + // UpdateProgress is used to update a job progress data. + UpdateProgress(ctx context.Context, job *types.Job) error + + // CountRunning returns number of jobs that are currently being run. 
+ CountRunning(ctx context.Context) (int, error) + + // ListReady returns a list of jobs that are ready for execution. + ListReady(ctx context.Context, now time.Time, limit int) ([]*types.Job, error) + + // ListDeadlineExceeded returns a list of jobs that have exceeded their execution deadline. + ListDeadlineExceeded(ctx context.Context, now time.Time) ([]*types.Job, error) + + // NextScheduledTime returns a scheduled time of the next ready job. + NextScheduledTime(ctx context.Context, now time.Time) (time.Time, error) + + // DeleteOld removes non-recurring jobs that have finished execution or have failed. + DeleteOld(ctx context.Context, olderThan time.Time) (int64, error) + } + PipelineStore interface { // Find returns a pipeline given a pipeline ID from the datastore. Find(ctx context.Context, id int64) (*types.Pipeline, error) @@ -454,8 +490,12 @@ type ( // Update tries to update a pipeline in the datastore Update(ctx context.Context, pipeline *types.Pipeline) error - // List lists the pipelines present in a parent space ID in the datastore. - List(ctx context.Context, spaceID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) + // List lists the pipelines present in a repository in the datastore. + List(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) + + // ListLatest lists the pipelines present in a repository in the datastore. + // It also returns latest build information for all the returned entries. + ListLatest(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) // UpdateOptLock updates the pipeline using the optimistic locking mechanism. UpdateOptLock(ctx context.Context, pipeline *types.Pipeline, @@ -464,11 +504,11 @@ type ( // Delete deletes a pipeline ID from the datastore. Delete(ctx context.Context, id int64) error - // Count the number of pipelines in a space matching the given filter. 
- Count(ctx context.Context, spaceID int64, filter types.ListQueryFilter) (int64, error) + // Count the number of pipelines in a repository matching the given filter. + Count(ctx context.Context, repoID int64, filter types.ListQueryFilter) (int64, error) - // DeleteByUID deletes a pipeline with a given UID in a space - DeleteByUID(ctx context.Context, spaceID int64, uid string) error + // DeleteByUID deletes a pipeline with a given UID under a repo. + DeleteByUID(ctx context.Context, repoID int64, uid string) error // IncrementSeqNum increments the sequence number of the pipeline IncrementSeqNum(ctx context.Context, pipeline *types.Pipeline) (*types.Pipeline, error) diff --git a/internal/store/database/execution.go b/internal/store/database/execution.go index 51c087ed6..4bc586995 100644 --- a/internal/store/database/execution.go +++ b/internal/store/database/execution.go @@ -14,6 +14,7 @@ import ( "github.com/harness/gitness/store/database" "github.com/harness/gitness/store/database/dbtx" "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" "github.com/jmoiron/sqlx" sqlxtypes "github.com/jmoiron/sqlx/types" @@ -294,6 +295,7 @@ func (s *executionStore) UpdateOptLock(ctx context.Context, } // List lists the executions for a given pipeline ID. +// It orders them in descending order of execution number. func (s *executionStore) List( ctx context.Context, pipelineID int64, @@ -302,7 +304,8 @@ func (s *executionStore) List( stmt := database.Builder. Select(executionColumns). From("executions"). - Where("execution_pipeline_id = ?", fmt.Sprint(pipelineID)) + Where("execution_pipeline_id = ?", fmt.Sprint(pipelineID)). 
+ OrderBy("execution_number " + enum.OrderDesc.String()) stmt = stmt.Limit(database.Limit(pagination.Size)) stmt = stmt.Offset(database.Offset(pagination.Page, pagination.Size)) diff --git a/internal/store/database/execution_map.go b/internal/store/database/execution_map.go index caf937667..526fbaf54 100644 --- a/internal/store/database/execution_map.go +++ b/internal/store/database/execution_map.go @@ -1,14 +1,12 @@ package database import ( - "encoding/json" - "github.com/harness/gitness/types" ) func mapInternalToExecution(in *execution) (*types.Execution, error) { var params map[string]string - err := json.Unmarshal(in.Params, ¶ms) + err := in.Params.Unmarshal(¶ms) if err != nil { return nil, err } diff --git a/internal/store/database/job.go b/internal/store/database/job.go new file mode 100644 index 000000000..3e40075a6 --- /dev/null +++ b/internal/store/database/job.go @@ -0,0 +1,430 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package database + +import ( + "context" + "database/sql" + "errors" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +var _ store.JobStore = (*JobStore)(nil) + +func NewJobStore(db *sqlx.DB) *JobStore { + return &JobStore{ + db: db, + } +} + +type JobStore struct { + db *sqlx.DB +} + +const ( + jobColumns = ` + job_uid + ,job_created + ,job_updated + ,job_type + ,job_priority + ,job_data + ,job_result + ,job_max_duration_seconds + ,job_max_retries + ,job_state + ,job_scheduled + ,job_total_executions + ,job_run_by + ,job_run_deadline + ,job_run_progress + ,job_last_executed + ,job_is_recurring + ,job_recurring_cron + ,job_consecutive_failures + ,job_last_failure_error` + + jobSelectBase = ` + SELECT` + jobColumns + ` + FROM jobs` +) + +// Find fetches a job by its unique identifier. +func (s *JobStore) Find(ctx context.Context, uid string) (*types.Job, error) { + const sqlQuery = jobSelectBase + ` + WHERE job_uid = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + result := &types.Job{} + if err := db.GetContext(ctx, result, sqlQuery, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find job by uid") + } + + return result, nil +} + +// Create creates a new job. 
+func (s *JobStore) Create(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + INSERT INTO jobs (` + jobColumns + ` + ) VALUES ( + :job_uid + ,:job_created + ,:job_updated + ,:job_type + ,:job_priority + ,:job_data + ,:job_result + ,:job_max_duration_seconds + ,:job_max_retries + ,:job_state + ,:job_scheduled + ,:job_total_executions + ,:job_run_by + ,:job_run_deadline + ,:job_run_progress + ,:job_last_executed + ,:job_is_recurring + ,:job_recurring_cron + ,:job_consecutive_failures + ,:job_last_failure_error + )` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object") + } + + if _, err := db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Upsert creates or updates a job. If the job didn't exist it will insert it in the database, +// otherwise it will update it but only if its definition has changed. 
+func (s *JobStore) Upsert(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + INSERT INTO jobs (` + jobColumns + ` + ) VALUES ( + :job_uid + ,:job_created + ,:job_updated + ,:job_type + ,:job_priority + ,:job_data + ,:job_result + ,:job_max_duration_seconds + ,:job_max_retries + ,:job_state + ,:job_scheduled + ,:job_total_executions + ,:job_run_by + ,:job_run_deadline + ,:job_run_progress + ,:job_last_executed + ,:job_is_recurring + ,:job_recurring_cron + ,:job_consecutive_failures + ,:job_last_failure_error + ) + ON CONFLICT (job_uid) DO + UPDATE SET + job_updated = :job_updated + ,job_type = :job_type + ,job_priority = :job_priority + ,job_data = :job_data + ,job_result = :job_result + ,job_max_duration_seconds = :job_max_duration_seconds + ,job_max_retries = :job_max_retries + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_is_recurring = :job_is_recurring + ,job_recurring_cron = :job_recurring_cron + WHERE + jobs.job_type <> :job_type OR + jobs.job_priority <> :job_priority OR + jobs.job_data <> :job_data OR + jobs.job_max_duration_seconds <> :job_max_duration_seconds OR + jobs.job_max_retries <> :job_max_retries OR + jobs.job_is_recurring <> :job_is_recurring OR + jobs.job_recurring_cron <> :job_recurring_cron` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object") + } + + if _, err := db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Upsert query failed") + } + + return nil +} + +// UpdateDefinition is used to update a job definition. 
+func (s *JobStore) UpdateDefinition(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_type = :job_type + ,job_priority = :job_priority + ,job_data = :job_data + ,job_result = :job_result + ,job_max_duration_seconds = :job_max_duration_seconds + ,job_max_retries = :job_max_retries + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_is_recurring = :job_is_recurring + ,job_recurring_cron = :job_recurring_cron + WHERE job_uid = :job_uid` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job definition") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +// UpdateExecution is used to update a job before and after execution. +func (s *JobStore) UpdateExecution(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_result = :job_result + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_total_executions = :job_total_executions + ,job_run_by = :job_run_by + ,job_run_deadline = :job_run_deadline + ,job_last_executed = :job_last_executed + ,job_consecutive_failures = :job_consecutive_failures + ,job_last_failure_error = :job_last_failure_error + WHERE job_uid = :job_uid` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) 
+ if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job execution") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +func (s *JobStore) UpdateProgress(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_result = :job_result + ,job_run_progress = :job_run_progress + WHERE job_uid = :job_uid AND job_state = 'running'` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job progress") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +// CountRunning returns number of jobs that are currently being run. +func (s *JobStore) CountRunning(ctx context.Context) (int, error) { + stmt := database.Builder. + Select("count(*)"). + From("jobs"). + Where("job_state = ?", enum.JobStateRunning) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert count running jobs query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed executing count running jobs query") + } + + return int(count), nil +} + +// ListReady returns a list of jobs that are ready for execution: +// The jobs with state="scheduled" and scheduled time in the past. 
+func (s *JobStore) ListReady(ctx context.Context, now time.Time, limit int) ([]*types.Job, error) { + stmt := database.Builder. + Select(jobColumns). + From("jobs"). + Where("job_state = ?", enum.JobStateScheduled). + Where("job_scheduled <= ?", now.UnixMilli()). + OrderBy("job_priority desc, job_scheduled asc, job_uid asc"). + Limit(uint64(limit)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert list scheduled jobs query to sql: %w", err) + } + + result := make([]*types.Job, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &result, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to execute list scheduled jobs query") + } + + return result, nil +} + +// ListDeadlineExceeded returns a list of jobs that have exceeded their execution deadline. +func (s *JobStore) ListDeadlineExceeded(ctx context.Context, now time.Time) ([]*types.Job, error) { + stmt := database.Builder. + Select(jobColumns). + From("jobs"). + Where("job_state = ?", enum.JobStateRunning). + Where("job_run_deadline < ?", now.UnixMilli()). + OrderBy("job_run_deadline asc") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert list overdue jobs query to sql: %w", err) + } + + result := make([]*types.Job, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &result, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to execute list overdue jobs query") + } + + return result, nil +} + +// NextScheduledTime returns a scheduled time of the next ready job or zero time if no such job exists. +func (s *JobStore) NextScheduledTime(ctx context.Context, now time.Time) (time.Time, error) { + stmt := database.Builder. + Select("job_scheduled"). + From("jobs"). + Where("job_state = ?", enum.JobStateScheduled). + Where("job_scheduled > ?", now.UnixMilli()). + OrderBy("job_scheduled asc"). 
+ Limit(1) + + query, args, err := stmt.ToSql() + if err != nil { + return time.Time{}, fmt.Errorf("failed to convert next scheduled time query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var result int64 + + err = db.QueryRowContext(ctx, query, args...).Scan(&result) + if errors.Is(err, sql.ErrNoRows) { + return time.Time{}, nil + } + if err != nil { + return time.Time{}, database.ProcessSQLErrorf(err, "failed to execute next scheduled time query") + } + + return time.UnixMilli(result), nil +} + +// DeleteOld removes non-recurring jobs that have finished execution or have failed. +func (s *JobStore) DeleteOld(ctx context.Context, olderThan time.Time) (int64, error) { + stmt := database.Builder. + Delete("jobs"). + Where("(job_state = ? OR job_state = ?)", enum.JobStateFinished, enum.JobStateFailed). + Where("job_is_recurring = false"). + Where("job_last_executed < ?", olderThan.UnixMilli()) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert delete done jobs query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + result, err := db.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to execute delete done jobs query") + } + + n, err := result.RowsAffected() + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to get number of deleted jobs") + } + + return n, nil +} diff --git a/internal/store/database/migrate/ci/ci_migrations.sql b/internal/store/database/migrate/ci/ci_migrations.sql index d69497388..4f25e4b24 100644 --- a/internal/store/database/migrate/ci/ci_migrations.sql +++ b/internal/store/database/migrate/ci/ci_migrations.sql @@ -1,6 +1,7 @@ DROP TABLE IF exists pipelines; DROP TABLE IF exists executions; DROP TABLE IF exists stages; +DROP TABLE IF exists secrets; DROP TABLE IF exists steps; DROP TABLE IF exists logs; DROP TABLE IF exists plugins; @@ -10,26 +11,17 @@ DROP TABLE IF exists triggers; CREATE TABLE pipelines ( pipeline_id INTEGER PRIMARY KEY AUTOINCREMENT ,pipeline_description TEXT NOT NULL - ,pipeline_space_id INTEGER NOT NULL ,pipeline_uid TEXT NOT NULL ,pipeline_seq INTEGER NOT NULL DEFAULT 0 ,pipeline_repo_id INTEGER NOT NULL - ,pipeline_repo_type TEXT NOT NULL - ,pipeline_repo_name TEXT NOT NULL ,pipeline_default_branch TEXT NOT NULL ,pipeline_config_path TEXT NOT NULL ,pipeline_created INTEGER NOT NULL ,pipeline_updated INTEGER NOT NULL ,pipeline_version INTEGER NOT NULL - -- Ensure unique combination of UID and ParentID - ,UNIQUE (pipeline_space_id, pipeline_uid) - - -- Foreign key to spaces table - ,CONSTRAINT fk_pipeline_space_id FOREIGN KEY (pipeline_space_id) - REFERENCES spaces (space_id) MATCH SIMPLE - ON UPDATE NO ACTION - ON DELETE CASCADE + -- Ensure unique combination of UID and repo ID + ,UNIQUE (pipeline_repo_id, pipeline_uid) -- Foreign key to repositories table ,CONSTRAINT fk_pipelines_repo_id FOREIGN KEY (pipeline_repo_id) @@ -198,23 +190,32 @@ CREATE TABLE logs ( -- Insert some pipelines INSERT INTO pipelines ( - pipeline_id, pipeline_description, pipeline_space_id, pipeline_uid, pipeline_seq, - pipeline_repo_id, 
pipeline_repo_type, pipeline_repo_name, pipeline_default_branch, + pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, + pipeline_repo_id, pipeline_default_branch, pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version ) VALUES ( - 1, 'Sample Pipeline 1', 1, 'pipeline_uid_1', 2, 1, 'git', 'sample_repo_1', + 1, 'Sample Pipeline 1', 'pipeline_uid_1', 2, 1, 'main', 'config_path_1', 1678932000, 1678932100, 1 ); INSERT INTO pipelines ( - pipeline_id, pipeline_description, pipeline_space_id, pipeline_uid, pipeline_seq, - pipeline_repo_id, pipeline_repo_type, pipeline_repo_name, pipeline_default_branch, + pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, + pipeline_repo_id, pipeline_default_branch, pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version ) VALUES ( - 2, 'Sample Pipeline 2', 1, 'pipeline_uid_2', 0, 1, 'git', 'sample_repo_2', + 2, 'Sample Pipeline 2', 'pipeline_uid_2', 0, 1, 'develop', 'config_path_2', 1678932200, 1678932300, 1 ); +INSERT INTO pipelines ( + pipeline_id, pipeline_description, pipeline_uid, pipeline_seq, + pipeline_repo_id, pipeline_default_branch, + pipeline_config_path, pipeline_created, pipeline_updated, pipeline_version +) VALUES ( + 3, 'Sample Pipeline 3', 'pipeline_uid_3', 0, 1, + 'develop', 'config_path_2', 1678932200000, 1678932300000, 1 +); + -- Insert some executions INSERT INTO executions ( execution_id, execution_pipeline_id, execution_repo_id, execution_trigger, @@ -296,6 +297,26 @@ INSERT INTO executions ( 'production', 5, 0, 1678932500, 1678932600, 1678932700, 1678932800, 1 ); +INSERT INTO executions ( + execution_id, execution_pipeline_id, execution_repo_id, execution_trigger, + execution_number, execution_parent, execution_status, execution_error, + execution_event, execution_action, execution_link, execution_timestamp, + execution_title, execution_message, execution_before, execution_after, + execution_ref, execution_source_repo, execution_source, 
execution_target, + execution_author, execution_author_name, execution_author_email, + execution_author_avatar, execution_sender, execution_params, execution_cron, + execution_deploy, execution_deploy_id, execution_debug, execution_started, + execution_finished, execution_created, execution_updated, execution_version +) VALUES ( + 5, 2, 1, 'manual', 3, 0, 'running', '', 'push', 'created', + 'https://example.com/pipelines/1', 1678932400, 'Pipeline Execution 1', + 'Pipeline execution message...', 'commit_hash_before', 'commit_hash_after', + 'refs/heads/main', 'source_repo_name', 'source_branch', 'target_branch', + 'author_login', 'Author Name', 'author@example.com', 'https://example.com/avatar.jpg', + 'sender_username', '{"param1": "value1", "param2": "value2"}', '0 0 * * *', + 'production', 5, 0, 1678932500, 1678932600, 1678932700, 1678932800, 1 +); + -- Insert some stages INSERT INTO stages (stage_id, stage_execution_id, stage_number, stage_parent_group_id, stage_kind, stage_type, stage_name, stage_status, stage_error, stage_errignore, stage_exit_code, stage_limit, stage_os, stage_arch, stage_variant, stage_kernel, stage_machine, stage_started, stage_stopped, stage_created, stage_updated, stage_version, stage_on_success, stage_on_failure, stage_depends_on, stage_labels, stage_limit_repo) VALUES ( diff --git a/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql b/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql new file mode 100644 index 000000000..7c5d0fba0 --- /dev/null +++ b/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +DROP INDEX jobs_run_deadline; +DROP INDEX jobs_scheduled; +DROP TABLE jobs; diff --git a/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql b/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql new file mode 100644 index 000000000..0f6d6f60d --- /dev/null +++ 
b/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE jobs ( + job_uid TEXT NOT NULL +,job_created BIGINT NOT NULL +,job_updated BIGINT NOT NULL +,job_type TEXT NOT NULL +,job_priority INTEGER NOT NULL +,job_data TEXT NOT NULL +,job_result TEXT NOT NULL +,job_max_duration_seconds INTEGER NOT NULL +,job_max_retries INTEGER NOT NULL +,job_state TEXT NOT NULL +,job_scheduled BIGINT NOT NULL +,job_total_executions INTEGER +,job_run_by TEXT NOT NULL +,job_run_deadline BIGINT +,job_run_progress INTEGER NOT NULL +,job_last_executed BIGINT +,job_is_recurring BOOLEAN NOT NULL +,job_recurring_cron TEXT NOT NULL +,job_consecutive_failures INTEGER NOT NULL +,job_last_failure_error TEXT NOT NULL +,CONSTRAINT pk_jobs_uid PRIMARY KEY (job_uid) +); + +CREATE INDEX jobs_scheduled + ON jobs(job_scheduled) + WHERE job_state = 'scheduled'; + +CREATE INDEX jobs_run_deadline + ON jobs(job_run_deadline) + WHERE job_state = 'running'; + +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql b/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql new file mode 100644 index 000000000..7c5d0fba0 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +DROP INDEX jobs_run_deadline; +DROP INDEX jobs_scheduled; +DROP TABLE jobs; diff --git a/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql b/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql new file mode 100644 index 000000000..9b625f378 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE jobs ( + job_uid TEXT NOT NULL +,job_created BIGINT NOT NULL +,job_updated BIGINT NOT NULL +,job_type TEXT NOT NULL +,job_priority INTEGER NOT NULL +,job_data 
TEXT NOT NULL +,job_result TEXT NOT NULL +,job_max_duration_seconds INTEGER NOT NULL +,job_max_retries INTEGER NOT NULL +,job_state TEXT NOT NULL +,job_scheduled BIGINT NOT NULL +,job_total_executions INTEGER +,job_run_by TEXT NOT NULL +,job_run_deadline BIGINT +,job_run_progress INTEGER NOT NULL +,job_last_executed BIGINT +,job_is_recurring BOOLEAN NOT NULL +,job_recurring_cron TEXT NOT NULL +,job_consecutive_failures INTEGER NOT NULL +,job_last_failure_error TEXT NOT NULL +,CONSTRAINT pk_jobs_uid PRIMARY KEY (job_uid) +); + +CREATE INDEX jobs_scheduled + ON jobs(job_scheduled) + WHERE job_state = 'scheduled'; + +CREATE INDEX jobs_run_deadline + ON jobs(job_run_deadline) + WHERE job_state = 'running'; + +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/pipeline.go b/internal/store/database/pipeline.go index 0aa333ead..9b0c804af 100644 --- a/internal/store/database/pipeline.go +++ b/internal/store/database/pipeline.go @@ -31,12 +31,9 @@ const ( pipelineColumns = ` pipeline_id ,pipeline_description - ,pipeline_space_id ,pipeline_uid ,pipeline_seq ,pipeline_repo_id - ,pipeline_repo_type - ,pipeline_repo_name ,pipeline_default_branch ,pipeline_config_path ,pipeline_created @@ -69,14 +66,14 @@ func (s *pipelineStore) Find(ctx context.Context, id int64) (*types.Pipeline, er return dst, nil } -// FindByUID returns a pipeline in a given space with a given UID. -func (s *pipelineStore) FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Pipeline, error) { +// FindByUID returns a pipeline for a given repo with a given UID. 
+func (s *pipelineStore) FindByUID(ctx context.Context, repoID int64, uid string) (*types.Pipeline, error) { const findQueryStmt = pipelineQueryBase + ` - WHERE pipeline_space_id = $1 AND pipeline_uid = $2` + WHERE pipeline_repo_id = $1 AND pipeline_uid = $2` db := dbtx.GetAccessor(ctx, s.db) dst := new(types.Pipeline) - if err := db.GetContext(ctx, dst, findQueryStmt, spaceID, uid); err != nil { + if err := db.GetContext(ctx, dst, findQueryStmt, repoID, uid); err != nil { return nil, database.ProcessSQLErrorf(err, "Failed to find pipeline") } return dst, nil @@ -87,12 +84,9 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er const pipelineInsertStmt = ` INSERT INTO pipelines ( pipeline_description - ,pipeline_space_id ,pipeline_uid ,pipeline_seq ,pipeline_repo_id - ,pipeline_repo_type - ,pipeline_repo_name ,pipeline_default_branch ,pipeline_config_path ,pipeline_created @@ -100,12 +94,9 @@ func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) er ,pipeline_version ) VALUES ( :pipeline_description, - :pipeline_space_id, :pipeline_uid, :pipeline_seq, :pipeline_repo_id, - :pipeline_repo_type, - :pipeline_repo_name, :pipeline_default_branch, :pipeline_config_path, :pipeline_created, @@ -171,16 +162,16 @@ func (s *pipelineStore) Update(ctx context.Context, p *types.Pipeline) error { return nil } -// List lists all the pipelines present in a space. +// List lists all the pipelines for a repository. func (s *pipelineStore) List( ctx context.Context, - parentID int64, + repoID int64, filter types.ListQueryFilter, ) ([]*types.Pipeline, error) { stmt := database.Builder. Select(pipelineColumns). From("pipelines"). 
- Where("pipeline_space_id = ?", fmt.Sprint(parentID)) + Where("pipeline_repo_id = ?", fmt.Sprint(repoID)) if filter.Query != "" { stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) @@ -204,6 +195,78 @@ func (s *pipelineStore) List( return dst, nil } +// ListLatest lists all the pipelines under a repository with information +// about the latest build if available. +func (s *pipelineStore) ListLatest( + ctx context.Context, + repoID int64, + filter types.ListQueryFilter, +) ([]*types.Pipeline, error) { + const pipelineExecutionColumns = pipelineColumns + ` + ,executions.execution_id + ,executions.execution_pipeline_id + ,execution_repo_id + ,execution_trigger + ,execution_number + ,execution_status + ,execution_error + ,execution_link + ,execution_timestamp + ,execution_title + ,execution_author + ,execution_author_name + ,execution_author_email + ,execution_author_avatar + ,execution_source + ,execution_target + ,execution_source_repo + ,execution_started + ,execution_finished + ,execution_created + ,execution_updated + ` + // Create a subquery to get max execution IDs for each unique execution pipeline ID. + subquery := database.Builder. + Select("execution_pipeline_id, execution_id, MAX(execution_number)"). + From("executions"). + Where("execution_repo_id = ?"). + GroupBy("execution_pipeline_id") + + // Convert the subquery to SQL. + subquerySQL, _, err := subquery.ToSql() + if err != nil { + return nil, err + } + + // Left join the previous table with executions and pipelines table. + stmt := database.Builder. + Select(pipelineExecutionColumns). + From("pipelines"). + LeftJoin("("+subquerySQL+") AS max_executions ON pipelines.pipeline_id = max_executions.execution_pipeline_id"). + LeftJoin("executions ON executions.execution_id = max_executions.execution_id"). 
+ Where("pipeline_repo_id = ?", fmt.Sprint(repoID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*pipelineExecutionJoin{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return convert(dst), nil +} + // UpdateOptLock updates the pipeline using the optimistic locking mechanism. func (s *pipelineStore) UpdateOptLock(ctx context.Context, pipeline *types.Pipeline, @@ -231,12 +294,12 @@ func (s *pipelineStore) UpdateOptLock(ctx context.Context, } } -// Count of pipelines in a space. -func (s *pipelineStore) Count(ctx context.Context, parentID int64, filter types.ListQueryFilter) (int64, error) { +// Count of pipelines under a repo. +func (s *pipelineStore) Count(ctx context.Context, repoID int64, filter types.ListQueryFilter) (int64, error) { stmt := database.Builder. Select("count(*)"). From("pipelines"). - Where("pipeline_space_id = ?", parentID) + Where("pipeline_repo_id = ?", repoID) if filter.Query != "" { stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) @@ -272,15 +335,15 @@ func (s *pipelineStore) Delete(ctx context.Context, id int64) error { return nil } -// DeleteByUID deletes a pipeline with a given UID in a space. -func (s *pipelineStore) DeleteByUID(ctx context.Context, spaceID int64, uid string) error { +// DeleteByUID deletes a pipeline with a given UID under a given repo. 
+func (s *pipelineStore) DeleteByUID(ctx context.Context, repoID int64, uid string) error { const pipelineDeleteStmt = ` DELETE FROM pipelines - WHERE pipeline_space_id = $1 AND pipeline_uid = $2` + WHERE pipeline_repo_id = $1 AND pipeline_uid = $2` db := dbtx.GetAccessor(ctx, s.db) - if _, err := db.ExecContext(ctx, pipelineDeleteStmt, spaceID, uid); err != nil { + if _, err := db.ExecContext(ctx, pipelineDeleteStmt, repoID, uid); err != nil { return database.ProcessSQLErrorf(err, "Could not delete pipeline") } diff --git a/internal/store/database/pipeline_join.go b/internal/store/database/pipeline_join.go new file mode 100644 index 000000000..a7e212a2a --- /dev/null +++ b/internal/store/database/pipeline_join.go @@ -0,0 +1,77 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package database + +import ( + "database/sql" + + "github.com/harness/gitness/types" +) + +// pipelineExecutionjoin struct represents a joined row between pipelines and executions +type pipelineExecutionJoin struct { + *types.Pipeline + ID sql.NullInt64 `db:"execution_id"` + PipelineID sql.NullInt64 `db:"execution_pipeline_id"` + RepoID sql.NullInt64 `db:"execution_repo_id"` + Trigger sql.NullString `db:"execution_trigger"` + Number sql.NullInt64 `db:"execution_number"` + Status sql.NullString `db:"execution_status"` + Error sql.NullString `db:"execution_error"` + Link sql.NullString `db:"execution_link"` + Timestamp sql.NullInt64 `db:"execution_timestamp"` + Title sql.NullString `db:"execution_title"` + Fork sql.NullString `db:"execution_source_repo"` + Source sql.NullString `db:"execution_source"` + Target sql.NullString `db:"execution_target"` + Author sql.NullString `db:"execution_author"` + AuthorName sql.NullString `db:"execution_author_name"` + AuthorEmail sql.NullString `db:"execution_author_email"` + AuthorAvatar sql.NullString 
`db:"execution_author_avatar"` + Started sql.NullInt64 `db:"execution_started"` + Finished sql.NullInt64 `db:"execution_finished"` + Created sql.NullInt64 `db:"execution_created"` + Updated sql.NullInt64 `db:"execution_updated"` +} + +func convert(rows []*pipelineExecutionJoin) []*types.Pipeline { + pipelines := []*types.Pipeline{} + for _, k := range rows { + pipeline := convertPipelineJoin(k) + pipelines = append(pipelines, pipeline) + } + return pipelines +} + +func convertPipelineJoin(join *pipelineExecutionJoin) *types.Pipeline { + ret := join.Pipeline + if !join.ID.Valid { + return ret + } + ret.Execution = &types.Execution{ + ID: join.ID.Int64, + PipelineID: join.PipelineID.Int64, + RepoID: join.RepoID.Int64, + Trigger: join.Trigger.String, + Number: join.Number.Int64, + Status: join.Status.String, + Error: join.Error.String, + Link: join.Link.String, + Timestamp: join.Timestamp.Int64, + Title: join.Title.String, + Fork: join.Fork.String, + Source: join.Source.String, + Target: join.Target.String, + Author: join.Author.String, + AuthorName: join.AuthorName.String, + AuthorEmail: join.AuthorEmail.String, + AuthorAvatar: join.AuthorAvatar.String, + Started: join.Started.Int64, + Finished: join.Finished.Int64, + Created: join.Created.Int64, + Updated: join.Updated.Int64, + } + return ret +} diff --git a/internal/store/database/webhook.go b/internal/store/database/webhook.go index f05e1c230..a50a7677d 100644 --- a/internal/store/database/webhook.go +++ b/internal/store/database/webhook.go @@ -357,6 +357,7 @@ func mapToWebhook(hook *webhook) (*types.Webhook, error) { Insecure: hook.Insecure, Triggers: triggersFromString(hook.Triggers), LatestExecutionResult: (*enum.WebhookExecutionResult)(hook.LatestExecutionResult.Ptr()), + Internal: hook.Internal, } switch { @@ -390,6 +391,7 @@ func mapToInternalWebhook(hook *types.Webhook) (*webhook, error) { Insecure: hook.Insecure, Triggers: triggersToString(hook.Triggers), LatestExecutionResult: 
null.StringFromPtr((*string)(hook.LatestExecutionResult)), + Internal: hook.Internal, } switch hook.ParentType { diff --git a/internal/store/database/wire.go b/internal/store/database/wire.go index a51f1b7e7..a1a969d41 100644 --- a/internal/store/database/wire.go +++ b/internal/store/database/wire.go @@ -23,6 +23,7 @@ var WireSet = wire.NewSet( ProvidePathStore, ProvideSpaceStore, ProvideRepoStore, + ProvideJobStore, ProvideExecutionStore, ProvidePipelineStore, ProvideStageStore, @@ -87,6 +88,11 @@ func ProvideRepoStore(db *sqlx.DB, pathCache store.PathCache) store.RepoStore { return NewRepoStore(db, pathCache) } +// ProvideJobStore provides a job store. +func ProvideJobStore(db *sqlx.DB) store.JobStore { + return NewJobStore(db) +} + // ProvidePipelineStore provides a pipeline store. func ProvidePipelineStore(db *sqlx.DB) store.PipelineStore { return NewPipelineStore(db) diff --git a/mocks/mock_client.go b/mocks/mock_client.go index 3cb746055..b16a6490e 100644 --- a/mocks/mock_client.go +++ b/mocks/mock_client.go @@ -8,10 +8,9 @@ import ( context "context" reflect "reflect" + gomock "github.com/golang/mock/gomock" user "github.com/harness/gitness/internal/api/controller/user" types "github.com/harness/gitness/types" - - gomock "github.com/golang/mock/gomock" ) // MockClient is a mock of Client interface. diff --git a/mocks/mock_store.go b/mocks/mock_store.go index 9310f3729..0af0bbc34 100644 --- a/mocks/mock_store.go +++ b/mocks/mock_store.go @@ -8,10 +8,9 @@ import ( context "context" reflect "reflect" + gomock "github.com/golang/mock/gomock" types "github.com/harness/gitness/types" enum "github.com/harness/gitness/types/enum" - - gomock "github.com/golang/mock/gomock" ) // MockPrincipalStore is a mock of PrincipalStore interface. 
diff --git a/types/config.go b/types/config.go index 81c388072..42608d4b7 100644 --- a/types/config.go +++ b/types/config.go @@ -180,4 +180,13 @@ type Config struct { SendTimeout time.Duration `envconfig:"GITNESS_PUBSUB_SEND_TIMEOUT" default:"60s"` ChannelSize int `envconfig:"GITNESS_PUBSUB_CHANNEL_SIZE" default:"100"` } + + BackgroundJobs struct { + // MaxRunning is maximum number of jobs that can be running at once. + MaxRunning int `envconfig:"GITNESS_JOBS_MAX_RUNNING" default:"10"` + + // PurgeFinishedOlderThan is duration after non-recurring, + // finished and failed jobs will be purged from the DB. + PurgeFinishedOlderThan time.Duration `envconfig:"GITNESS_JOBS_PURGE_FINISHED_OLDER_THAN" default:"120h"` + } } diff --git a/types/enum/job.go b/types/enum/job.go new file mode 100644 index 000000000..a92a060da --- /dev/null +++ b/types/enum/job.go @@ -0,0 +1,25 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package enum + +// JobState represents state of a background job. +type JobState string + +// JobState enumeration. +const ( + JobStateScheduled JobState = "scheduled" + JobStateRunning JobState = "running" + JobStateFinished JobState = "finished" + JobStateFailed JobState = "failed" +) + +// JobPriority represents priority of a background job. +type JobPriority int + +// JobPriority enumeration. 
+const ( + JobPriorityNormal JobPriority = 0 + JobPriorityElevated JobPriority = 1 +) diff --git a/types/enum/membership_role.go b/types/enum/membership_role.go index acc393b08..4cce51bde 100644 --- a/types/enum/membership_role.go +++ b/types/enum/membership_role.go @@ -31,7 +31,7 @@ var membershipRoleReaderPermissions = slices.Clip(slices.Insert([]Permission{}, )) var membershipRoleExecutorPermissions = slices.Clip(slices.Insert(membershipRoleReaderPermissions, 0, - PermissionCommitCheckReport, + PermissionRepoReportCommitCheck, PermissionPipelineExecute, PermissionSecretAccess, PermissionConnectorAccess, @@ -46,7 +46,7 @@ var membershipRoleSpaceOwnerPermissions = slices.Clip(slices.Insert(membershipRo PermissionRepoEdit, PermissionRepoDelete, PermissionRepoPush, - PermissionCommitCheckReport, + PermissionRepoReportCommitCheck, PermissionSpaceEdit, PermissionSpaceCreate, diff --git a/types/enum/permission.go b/types/enum/permission.go index 2a6651803..962627ed4 100644 --- a/types/enum/permission.go +++ b/types/enum/permission.go @@ -17,7 +17,6 @@ const ( ResourceTypeSecret ResourceType = "SECRET" ResourceTypeConnector ResourceType = "CONNECTOR" ResourceTypeTemplate ResourceType = "TEMPLATE" - // ResourceType_Branch ResourceType = "BRANCH" ) // Permission represents the different types of permissions a principal can have. 
@@ -37,10 +36,11 @@ const ( /* ----- REPOSITORY ----- */ - PermissionRepoView Permission = "repo_view" - PermissionRepoEdit Permission = "repo_edit" - PermissionRepoDelete Permission = "repo_delete" - PermissionRepoPush Permission = "repo_push" + PermissionRepoView Permission = "repo_view" + PermissionRepoEdit Permission = "repo_edit" + PermissionRepoDelete Permission = "repo_delete" + PermissionRepoPush Permission = "repo_push" + PermissionRepoReportCommitCheck Permission = "repo_reportCommitCheck" ) const ( @@ -114,10 +114,3 @@ const ( PermissionTemplateDelete Permission = "template_delete" PermissionTemplateAccess Permission = "template_access" ) - -const ( - /* - ----- COMMIT CHECK ----- - */ - PermissionCommitCheckReport Permission = "commitCheck_report" -) diff --git a/types/job.go b/types/job.go new file mode 100644 index 000000000..e5ddb9cee --- /dev/null +++ b/types/job.go @@ -0,0 +1,38 @@ +// Copyright 2022 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package types + +import "github.com/harness/gitness/types/enum" + +type Job struct { + UID string `db:"job_uid"` + Created int64 `db:"job_created"` + Updated int64 `db:"job_updated"` + Type string `db:"job_type"` + Priority enum.JobPriority `db:"job_priority"` + Data string `db:"job_data"` + Result string `db:"job_result"` + MaxDurationSeconds int `db:"job_max_duration_seconds"` + MaxRetries int `db:"job_max_retries"` + State enum.JobState `db:"job_state"` + Scheduled int64 `db:"job_scheduled"` + TotalExecutions int `db:"job_total_executions"` + RunBy string `db:"job_run_by"` + RunDeadline int64 `db:"job_run_deadline"` + RunProgress int `db:"job_run_progress"` + LastExecuted int64 `db:"job_last_executed"` + IsRecurring bool `db:"job_is_recurring"` + RecurringCron string `db:"job_recurring_cron"` + ConsecutiveFailures int `db:"job_consecutive_failures"` + LastFailureError string `db:"job_last_failure_error"` +} + +type JobStateChange struct { + UID string `json:"uid"` + State enum.JobState `json:"state"` + Progress int `json:"progress"` + Result string `json:"result"` + Failure string `json:"failure"` +} diff --git a/types/pipeline.go b/types/pipeline.go index 019bf99a1..c3256f28d 100644 --- a/types/pipeline.go +++ b/types/pipeline.go @@ -4,20 +4,16 @@ package types -import "github.com/harness/gitness/types/enum" - type Pipeline struct { - ID int64 `db:"pipeline_id" json:"id"` - Description string `db:"pipeline_description" json:"description"` - SpaceID int64 `db:"pipeline_space_id" json:"space_id"` - UID string `db:"pipeline_uid" json:"uid"` - Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline - RepoID int64 `db:"pipeline_repo_id" json:"repo_id"` // null if repo_type != gitness - RepoType enum.ScmType `db:"pipeline_repo_type" json:"repo_type"` - RepoName string `db:"pipeline_repo_name" json:"repo_name"` - DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"` - ConfigPath string `db:"pipeline_config_path" 
json:"config_path"` - Created int64 `db:"pipeline_created" json:"created"` - Updated int64 `db:"pipeline_updated" json:"updated"` - Version int64 `db:"pipeline_version" json:"version"` + ID int64 `db:"pipeline_id" json:"id"` + Description string `db:"pipeline_description" json:"description"` + UID string `db:"pipeline_uid" json:"uid"` + Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline + RepoID int64 `db:"pipeline_repo_id" json:"repo_id"` + DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"` + ConfigPath string `db:"pipeline_config_path" json:"config_path"` + Created int64 `db:"pipeline_created" json:"created"` + Execution *Execution `db:"-" json:"execution,omitempty"` // information about the latest execution if available + Updated int64 `db:"pipeline_updated" json:"updated"` + Version int64 `db:"pipeline_version" json:"-"` } diff --git a/web/src/RouteDefinitions.ts b/web/src/RouteDefinitions.ts index 291de2f29..d66aa42f9 100644 --- a/web/src/RouteDefinitions.ts +++ b/web/src/RouteDefinitions.ts @@ -45,8 +45,8 @@ export interface CODERoutes { toCODESpaceAccessControl: (args: Required>) => string toCODESpaceSettings: (args: Required>) => string - toCODEPipelines: (args: Required>) => string - toCODEPipelineEdit: (args: Required>) => string + toCODEPipelines: (args: Required>) => string + toCODEPipelineEdit: (args: Required>) => string toCODESecrets: (args: Required>) => string toCODEGlobalSettings: () => string @@ -74,8 +74,8 @@ export interface CODERoutes { toCODEWebhookDetails: (args: Required>) => string toCODESettings: (args: Required>) => string - toCODEExecutions: (args: Required>) => string - toCODEExecution: (args: Required>) => string + toCODEExecutions: (args: Required>) => string + toCODEExecution: (args: Required>) => string toCODESecret: (args: Required>) => string } @@ -96,8 +96,8 @@ export const routes: CODERoutes = { toCODESpaceAccessControl: ({ space }) => `/access-control/${space}`, 
toCODESpaceSettings: ({ space }) => `/settings/${space}`, - toCODEPipelines: ({ space }) => `/pipelines/${space}`, - toCODEPipelineEdit: ({ space, pipeline }) => `/pipelines/${space}/pipeline/${pipeline}/edit`, + toCODEPipelines: ({ repoPath }) => `/${repoPath}/pipelines`, + toCODEPipelineEdit: ({ repoPath, pipeline }) => `/${repoPath}/pipeline/${pipeline}/edit`, toCODESecrets: ({ space }) => `/secrets/${space}`, toCODEGlobalSettings: () => '/settings', @@ -130,8 +130,7 @@ export const routes: CODERoutes = { toCODEWebhookNew: ({ repoPath }) => `/${repoPath}/webhooks/new`, toCODEWebhookDetails: ({ repoPath, webhookId }) => `/${repoPath}/webhook/${webhookId}`, - toCODEExecutions: ({ space, pipeline }) => `/pipelines/${space}/pipeline/${pipeline}`, - toCODEExecution: ({ space, pipeline, execution }) => - `/pipelines/${space}/pipeline/${pipeline}/execution/${execution}`, + toCODEExecutions: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}`, + toCODEExecution: ({ repoPath, pipeline, execution }) => `/${repoPath}/pipelines/${pipeline}/execution/${execution}`, toCODESecret: ({ space, secret }) => `/secrets/${space}/secret/${secret}` } diff --git a/web/src/RouteDestinations.tsx b/web/src/RouteDestinations.tsx index 98b5884ca..551941694 100644 --- a/web/src/RouteDestinations.tsx +++ b/web/src/RouteDestinations.tsx @@ -167,7 +167,7 @@ export const RouteDestinations: React.FC = React.memo(function RouteDestinations {OPEN_SOURCE_PIPELINES && ( + @@ -195,7 +195,7 @@ export const RouteDestinations: React.FC = React.memo(function RouteDestinations )} {OPEN_SOURCE_PIPELINES && ( - + diff --git a/web/src/components/Changes/Changes.tsx b/web/src/components/Changes/Changes.tsx index c10b7f678..bee5ceccb 100644 --- a/web/src/components/Changes/Changes.tsx +++ b/web/src/components/Changes/Changes.tsx @@ -243,8 +243,8 @@ export const Changes: React.FC = ({ str={getString('pr.diffStatsLabel')} vars={{ changedFilesLink: , - addedLines: formatNumber(diffStats.addedLines), - 
deletedLines: formatNumber(diffStats.deletedLines), + addedLines: diffStats.addedLines ? formatNumber(diffStats.addedLines) : '0', + deletedLines: diffStats.deletedLines ? formatNumber(diffStats.deletedLines) : '0', configuration: ( = ({ setSelectedCommits }) => { const { getString } = useStrings() - const allCommitsSHA = allCommits.map(commit => commit.sha as string) useEffect(() => { @@ -63,7 +62,7 @@ const CommitRangeDropdown: React.FC = ({ event: React.MouseEvent, selectedCommitSHA: string ) => { - if (event.shiftKey || selectedCommits.length > 1) { + if (event.shiftKey) { // Select Commit setSelectedCommits(current => { if (current.includes(selectedCommitSHA)) { diff --git a/web/src/components/Console/Console.module.scss b/web/src/components/Console/Console.module.scss new file mode 100644 index 000000000..b7df1b8b6 --- /dev/null +++ b/web/src/components/Console/Console.module.scss @@ -0,0 +1,32 @@ +.container { + display: flex; + flex-direction: column; + background-color: black; + height: 100%; + overflow-y: auto; +} + +.log { + color: white; + font-family: Inconsolata, monospace; + font-size: 2rem; +} + +.header { + position: sticky; + top: 0; + background-color: var(--black); + height: var(--log-content-header-height); + + .headerLayout { + display: flex; + align-items: baseline; + border-bottom: 1px solid var(--grey-800); + padding: var(--spacing-medium) 0; + font-weight: 600; + } +} + +.steps { + padding: var(--spacing-medium) !important; +} diff --git a/web/src/components/Console/Console.module.scss.d.ts b/web/src/components/Console/Console.module.scss.d.ts new file mode 100644 index 000000000..5e3d1c140 --- /dev/null +++ b/web/src/components/Console/Console.module.scss.d.ts @@ -0,0 +1,10 @@ +/* eslint-disable */ +// this is an auto-generated file +declare const styles: { + readonly container: string + readonly log: string + readonly header: string + readonly headerLayout: string + readonly steps: string +} +export default styles diff --git 
a/web/src/components/Console/Console.tsx b/web/src/components/Console/Console.tsx new file mode 100644 index 000000000..263f0276d --- /dev/null +++ b/web/src/components/Console/Console.tsx @@ -0,0 +1,51 @@ +import React, { FC } from 'react' +import { useParams } from 'react-router-dom' +import { Container, Layout, Text } from '@harnessio/uicore' +import { Color, FontVariation } from '@harnessio/design-system' +import type { CODEProps } from 'RouteDefinitions' +import type { TypesStage } from 'services/code' +import ConsoleStep from 'components/ConsoleStep/ConsoleStep' +import { timeDistance } from 'utils/Utils' +// import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import css from './Console.module.scss' + +interface ConsoleProps { + stage: TypesStage | undefined + repoPath: string +} + +const Console: FC = ({ stage, repoPath }) => { + const { pipeline, execution: executionNum } = useParams() + + return ( +
+ + + + {stage?.name} + + {stage?.started && stage?.stopped && ( + + {/* this needs to be fixed */} + {timeDistance(stage?.started, stage?.stopped)} + + )} + + + + {stage?.steps?.map((step, index) => ( + + ))} + 
+ ) +} + +export default Console diff --git a/web/src/components/ConsoleLogs/ConsoleLogs.module.scss b/web/src/components/ConsoleLogs/ConsoleLogs.module.scss new file mode 100644 index 000000000..f5aa335fa --- /dev/null +++ b/web/src/components/ConsoleLogs/ConsoleLogs.module.scss @@ -0,0 +1,16 @@ +.logLayout { + margin-left: 2.3rem !important; +} + +.lineNumber { + width: 1.5rem; + color: #999; + margin-right: 1rem; + font-family: 'Roboto Mono' !important; +} + +.log { + color: white !important; + margin-bottom: 1rem; + font-family: 'Roboto Mono' !important; +} diff --git a/web/src/components/ConsoleLogs/ConsoleLogs.module.scss.d.ts b/web/src/components/ConsoleLogs/ConsoleLogs.module.scss.d.ts new file mode 100644 index 000000000..898a53cd0 --- /dev/null +++ b/web/src/components/ConsoleLogs/ConsoleLogs.module.scss.d.ts @@ -0,0 +1,8 @@ +/* eslint-disable */ +// this is an auto-generated file +declare const styles: { + readonly logLayout: string + readonly lineNumber: string + readonly log: string +} +export default styles diff --git a/web/src/components/ConsoleLogs/ConsoleLogs.tsx b/web/src/components/ConsoleLogs/ConsoleLogs.tsx new file mode 100644 index 000000000..7d4dffe84 --- /dev/null +++ b/web/src/components/ConsoleLogs/ConsoleLogs.tsx @@ -0,0 +1,36 @@ +import { Layout, Text } from '@harnessio/uicore' +import React, { FC } from 'react' +import css from './ConsoleLogs.module.scss' + +// currently a string - should be an array of strings in future +interface ConsoleLogsProps { + logs: string +} + +interface log { + pos: number + out: string + time: number +} + +const convertStringToLogArray = (logs: string): log[] => { + const logStrings = logs.split('\n').map(log => { + return JSON.parse(log) + }) + + return logStrings +} + +const ConsoleLogs: FC = ({ logs }) => { + const logArray = convertStringToLogArray(logs) + return logArray.map((log, index) => { + return ( + + {log.pos} + {log.out} + + ) + }) +} + +export default ConsoleLogs diff --git 
a/web/src/components/ConsoleStep/ConsoleStep.module.scss b/web/src/components/ConsoleStep/ConsoleStep.module.scss new file mode 100644 index 000000000..966563a30 --- /dev/null +++ b/web/src/components/ConsoleStep/ConsoleStep.module.scss @@ -0,0 +1,10 @@ +.stepLayout { + display: flex; + align-items: center; + cursor: pointer; +} + +.loading { + margin-left: 2.3rem !important; + margin-top: 1.5rem !important; +} diff --git a/web/src/components/ConsoleStep/ConsoleStep.module.scss.d.ts b/web/src/components/ConsoleStep/ConsoleStep.module.scss.d.ts new file mode 100644 index 000000000..7f68607e5 --- /dev/null +++ b/web/src/components/ConsoleStep/ConsoleStep.module.scss.d.ts @@ -0,0 +1,7 @@ +/* eslint-disable */ +// this is an auto-generated file +declare const styles: { + readonly stepLayout: string + readonly loading: string +} +export default styles diff --git a/web/src/components/ConsoleStep/ConsoleStep.tsx b/web/src/components/ConsoleStep/ConsoleStep.tsx new file mode 100644 index 000000000..b09c5f5f5 --- /dev/null +++ b/web/src/components/ConsoleStep/ConsoleStep.tsx @@ -0,0 +1,67 @@ +import { Icon } from '@harnessio/icons' +import { FlexExpander, Layout } from '@harnessio/uicore' +import React, { FC, useEffect } from 'react' +import { useGet } from 'restful-react' +import { Text } from '@harnessio/uicore' +import type { TypesStep } from 'services/code' +import { timeDistance } from 'utils/Utils' +import ConsoleLogs from 'components/ConsoleLogs/ConsoleLogs' +import { useStrings } from 'framework/strings' +import css from './ConsoleStep.module.scss' + +interface ConsoleStepProps { + step: TypesStep | undefined + stageNumber: number | undefined + repoPath: string + pipelineName: string | undefined + executionNumber: number +} + +const ConsoleStep: FC = ({ step, stageNumber, repoPath, pipelineName, executionNumber }) => { + const { getString } = useStrings() + + const [isOpened, setIsOpened] = React.useState(false) + + const { data, error, loading, refetch } = useGet({ 
+ path: `/api/v1/repos/${repoPath}/+/pipelines/${pipelineName}/executions/${executionNumber}/logs/${String( + stageNumber + )}/${String(step?.number)}`, + lazy: true + }) + + // this refetches any open steps when the stage number changes - really it shouldn't refetch until reopened... + useEffect(() => { + setIsOpened(false) + refetch() + }, [stageNumber, refetch]) + + return ( + <> + { + setIsOpened(!isOpened) + if (!data && !loading) refetch() + }}> + + + {step?.name} + + {step?.started && step?.stopped &&
{timeDistance(step?.stopped, step?.started)}
} +
+ + {isOpened ? ( + loading ? ( +
{getString('loading')}
+ ) : error ? ( +
Error: {error}
+ ) : data ? ( + + ) : null + ) : null} + + ) +} + +export default ConsoleStep diff --git a/web/src/components/CreateBranchModal/CreateBranchModal.tsx b/web/src/components/CreateBranchModal/CreateBranchModal.tsx index d2bdba67c..94976684c 100644 --- a/web/src/components/CreateBranchModal/CreateBranchModal.tsx +++ b/web/src/components/CreateBranchModal/CreateBranchModal.tsx @@ -13,7 +13,8 @@ import { useToaster, FormInput, Label, - ButtonVariation + ButtonVariation, + StringSubstitute } from '@harnessio/uicore' import { Icon } from '@harnessio/icons' import { FontVariation } from '@harnessio/design-system' @@ -77,7 +78,15 @@ export function useCreateBranchModal({ hideModal() onSuccess(response) if (showSuccessMessage) { - showSuccess(getString('branchCreated', { branch: name }), 5000) + showSuccess( + , + 5000 + ) } }) .catch(_error => { diff --git a/web/src/components/CreateTagModal/CreateTagModal.tsx b/web/src/components/CreateTagModal/CreateTagModal.tsx index ac8bda32f..813f1479e 100644 --- a/web/src/components/CreateTagModal/CreateTagModal.tsx +++ b/web/src/components/CreateTagModal/CreateTagModal.tsx @@ -13,7 +13,8 @@ import { useToaster, FormInput, Label, - ButtonVariation + ButtonVariation, + StringSubstitute } from '@harnessio/uicore' import { Icon } from '@harnessio/icons' import { FontVariation } from '@harnessio/design-system' @@ -77,7 +78,15 @@ export function useCreateTagModal({ hideModal() onSuccess(response) if (showSuccessMessage) { - showSuccess(getString('tagCreated', { tag: name }), 5000) + showSuccess( + , + 5000 + ) } }) .catch(_error => { diff --git a/web/src/components/ExecutionStageList/ExecutionStageList.module.scss b/web/src/components/ExecutionStageList/ExecutionStageList.module.scss new file mode 100644 index 000000000..097a1d1cc --- /dev/null +++ b/web/src/components/ExecutionStageList/ExecutionStageList.module.scss @@ -0,0 +1,42 @@ +.container { + display: flex; + flex-direction: column; + height: 100%; +} + +.menu { + 
--stage-title-height: 54px; + + width: 100%; + height: 100%; + + .menuItem { + margin: 0.5rem 0 0.5rem 1rem !important; + cursor: pointer; + + &:not(:last-child) { + border-bottom: 1px solid var(--grey-100); + } + + .layout { + display: flex; + align-items: center; + min-height: var(--stage-title-height); + padding: 0 var(--spacing-medium) 0 var(--spacing-medium); + + &.selected { + background-color: var(--primary-1); + + .uid { + color: var(--primary-7) !important; + } + } + + .uid { + color: var(--grey-700) !important; + font-weight: 600 !important; + font-size: 1rem !important; + } + } + } +} diff --git a/web/src/components/ExecutionStageList/ExecutionStageList.module.scss.d.ts b/web/src/components/ExecutionStageList/ExecutionStageList.module.scss.d.ts new file mode 100644 index 000000000..0980665f8 --- /dev/null +++ b/web/src/components/ExecutionStageList/ExecutionStageList.module.scss.d.ts @@ -0,0 +1,11 @@ +/* eslint-disable */ +// this is an auto-generated file +declare const styles: { + readonly container: string + readonly menu: string + readonly menuItem: string + readonly layout: string + readonly selected: string + readonly uid: string +} +export default styles diff --git a/web/src/components/ExecutionStageList/ExecutionStageList.tsx b/web/src/components/ExecutionStageList/ExecutionStageList.tsx new file mode 100644 index 000000000..e2c44a2c8 --- /dev/null +++ b/web/src/components/ExecutionStageList/ExecutionStageList.tsx @@ -0,0 +1,56 @@ +import React, { FC } from 'react' +import { Container, Layout, Text } from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import cx from 'classnames' +import type { TypesStage } from 'services/code' +import css from './ExecutionStageList.module.scss' + +interface ExecutionStageListProps { + stages: TypesStage[] + selectedStage: number | null + setSelectedStage: (selectedStep: number | null) => void +} + +interface ExecutionStageProps { + stage: TypesStage + isSelected?: boolean + selectedStage: number | 
null + setSelectedStage: (selectedStage: number | null) => void +} + +const ExecutionStage: FC = ({ stage, isSelected = false, setSelectedStage }) => { + return ( + { + setSelectedStage(stage.number || null) + }}> + + + + {stage.name} + + + + ) +} + +const ExecutionStageList: FC = ({ stages, setSelectedStage, selectedStage }) => { + return ( + + {stages.map((stage, index) => { + return ( + + ) + })} + + ) +} + +export default ExecutionStageList diff --git a/web/src/framework/strings/stringTypes.ts b/web/src/framework/strings/stringTypes.ts index 6a6893752..8eb522d35 100644 --- a/web/src/framework/strings/stringTypes.ts +++ b/web/src/framework/strings/stringTypes.ts @@ -169,9 +169,11 @@ export interface StringsMap { enterUser: string error: string error404Text: string + 'executions.description': string 'executions.name': string 'executions.newExecutionButton': string 'executions.noData': string + 'executions.time': string executor: string existingAccount: string expiration: string diff --git a/web/src/i18n/strings.en.yaml b/web/src/i18n/strings.en.yaml index c461f1a7a..5796f3388 100644 --- a/web/src/i18n/strings.en.yaml +++ b/web/src/i18n/strings.en.yaml @@ -146,7 +146,7 @@ branchSource: Branch source branchSourceDesc: Choose a source branch basedOn: Based on failedToCreateBranch: Failed to create Branch. Please try again. -branchCreated: Branch {{branch}} created. +branchCreated: Branch {branch} created. tagCreated: Tag {{tag}} created. confirmation: Confirmation deleteBranch: Delete Branches @@ -158,8 +158,8 @@ browseFiles: Browse Files compare: Compare commitString: 'Commit {commit}' repoDeleted: Repository {{repo}} deleted. -branchDeleted: Branch {{branch}} deleted. -tagDeleted: Tag {{tag}} deleted. +branchDeleted: Branch {branch} deleted. +tagDeleted: Tag {tag} deleted. failedToDeleteBranch: Failed to delete Branch. Please try again. 
createFile: Create __path__ updateFile: Update __path__ @@ -637,8 +637,10 @@ pipelines: run: Run pipeline executions: noData: There are no executions :( - newExecutionButton: New Execution + newExecutionButton: Run Pipeline name: Execution Name + description: Description + time: Time selectRange: Shift-click to select a range allCommits: All Commits secrets: diff --git a/web/src/icons/CodeFileFill.svg b/web/src/icons/CodeFileFill.svg index 7fb0d424b..666e07119 100644 --- a/web/src/icons/CodeFileFill.svg +++ b/web/src/icons/CodeFileFill.svg @@ -1,5 +1,4 @@ - - - - + + + diff --git a/web/src/icons/Submodules.svg b/web/src/icons/Submodules.svg new file mode 100644 index 000000000..1ab58b373 --- /dev/null +++ b/web/src/icons/Submodules.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/web/src/icons/Symlink.svg b/web/src/icons/Symlink.svg new file mode 100644 index 000000000..111a407a4 --- /dev/null +++ b/web/src/icons/Symlink.svg @@ -0,0 +1,4 @@ + + + + diff --git a/web/src/layouts/menu/DefaultMenu.tsx b/web/src/layouts/menu/DefaultMenu.tsx index a95066f14..4f84736ae 100644 --- a/web/src/layouts/menu/DefaultMenu.tsx +++ b/web/src/layouts/menu/DefaultMenu.tsx @@ -1,21 +1,19 @@ import React, { useMemo, useState } from 'react' import { Container, Layout } from '@harnessio/uicore' import { Render } from 'react-jsx-match' -import { useHistory, useRouteMatch, useParams } from 'react-router-dom' +import { useHistory, useRouteMatch } from 'react-router-dom' import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' import { useStrings } from 'framework/strings' import type { TypesSpace } from 'services/code' import { SpaceSelector } from 'components/SpaceSelector/SpaceSelector' import { useAppContext } from 'AppContext' import { useFeatureFlag } from 'hooks/useFeatureFlag' -import type { CODEProps } from 'RouteDefinitions' import { NavMenuItem } from './NavMenuItem' import css from './DefaultMenu.module.scss' export const DefaultMenu: React.FC = () => { const history = 
useHistory() const { routes } = useAppContext() - const params = useParams() const [selectedSpace, setSelectedSpace] = useState() const { repoMetadata, gitRef, commitRef } = useGetRepositoryMetadata() const { getString } = useStrings() @@ -26,7 +24,6 @@ export const DefaultMenu: React.FC = () => { [routeMatch] ) const isCommitSelected = useMemo(() => routeMatch.path === '/:space*/:repoName/commit/:commitRef*', [routeMatch]) - const isPipelineSelected = routeMatch.path.startsWith('/pipelines/:space*/pipeline/:pipeline') const { OPEN_SOURCE_PIPELINES, OPEN_SOURCE_SECRETS } = useFeatureFlag() return ( @@ -121,6 +118,17 @@ export const DefaultMenu: React.FC = () => { })} /> + {OPEN_SOURCE_PIPELINES && ( + + )} + { - {OPEN_SOURCE_PIPELINES && ( - - {/* icon is placeholder */} - - - )} - - {OPEN_SOURCE_PIPELINES && ( - - - - - - - - )} - {OPEN_SOURCE_SECRETS && ( {/* icon is placeholder */} diff --git a/web/src/pages/Execution/Execution.module.scss b/web/src/pages/Execution/Execution.module.scss index f1fb64bb6..524ad90cf 100644 --- a/web/src/pages/Execution/Execution.module.scss +++ b/web/src/pages/Execution/Execution.module.scss @@ -1,4 +1,59 @@ .main { min-height: var(--page-height); background-color: var(--primary-bg) !important; + + :global { + .Resizer { + background-color: var(--grey-300); + opacity: 0.2; + z-index: 1; + box-sizing: border-box; + background-clip: padding-box; + } + + .Resizer:hover { + transition: all 2s ease; + } + + .Resizer.horizontal { + margin: -5px 0; + border-top: 5px solid rgba(255, 255, 255, 0); + border-bottom: 5px solid rgba(255, 255, 255, 0); + cursor: row-resize; + } + + .Resizer.horizontal:hover { + border-top: 5px solid rgba(0, 0, 0, 0.5); + border-bottom: 5px solid rgba(0, 0, 0, 0.5); + } + + .Resizer.vertical { + width: 11px; + margin: 0 -5px; + border-left: 5px solid rgba(255, 255, 255, 0); + border-right: 5px solid rgba(255, 255, 255, 0); + cursor: col-resize; + } + + .Resizer.vertical:hover { + border-left: 5px solid rgba(0, 0, 
0, 0.5); + border-right: 5px solid rgba(0, 0, 0, 0.5); + } + + .Resizer.disabled { + cursor: not-allowed; + } + + .Resizer.disabled:hover { + border-color: transparent; + } + } +} + +.container { + height: calc(100vh - var(--page-header-height)); +} + +.withError { + display: grid; } diff --git a/web/src/pages/Execution/Execution.module.scss.d.ts b/web/src/pages/Execution/Execution.module.scss.d.ts index 9e614bf2d..9828b69dc 100644 --- a/web/src/pages/Execution/Execution.module.scss.d.ts +++ b/web/src/pages/Execution/Execution.module.scss.d.ts @@ -2,5 +2,7 @@ // this is an auto-generated file declare const styles: { readonly main: string + readonly container: string + readonly withError: string } export default styles diff --git a/web/src/pages/Execution/Execution.tsx b/web/src/pages/Execution/Execution.tsx index 4dd4d2c2d..630bfa022 100644 --- a/web/src/pages/Execution/Execution.tsx +++ b/web/src/pages/Execution/Execution.tsx @@ -1,29 +1,81 @@ -import React from 'react' -import { Container, PageHeader } from '@harnessio/uicore' +import { Container, PageBody } from '@harnessio/uicore' +import React, { useState } from 'react' +import cx from 'classnames' import { useParams } from 'react-router-dom' import { useGet } from 'restful-react' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import type { CODEProps } from 'RouteDefinitions' +import SplitPane from 'react-split-pane' +import { routes, type CODEProps } from 'RouteDefinitions' import type { TypesExecution } from 'services/code' +import ExecutionStageList from 'components/ExecutionStageList/ExecutionStageList' +import Console from 'components/Console/Console' +import { getErrorMessage, voidFn } from 'utils/Utils' +import { useStrings } from 'framework/strings' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' 
+import noExecutionImage from '../RepositoriesListing/no-repo.svg' import css from './Execution.module.scss' const Execution = () => { - const space = useGetSpaceParam() const { pipeline, execution: executionNum } = useParams() + const { getString } = useStrings() + + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() const { - data: execution - // error, - // loading, - // refetch - // response + data: execution, + error: executionError, + loading: executionLoading } = useGet({ - path: `/api/v1/pipelines/${space}/${pipeline}/+/executions/${executionNum}` + path: `/api/v1/repos/${repoMetadata?.path}/+/pipelines/${pipeline}/executions/${executionNum}`, + lazy: !repoMetadata }) + const [selectedStage, setSelectedStage] = useState(1) + return ( - + + !execution && !loading && !executionLoading, + image: noExecutionImage, + message: getString('executions.noData') + // button: NewExecutionButton + }}> + + {execution && ( + + + {selectedStage && ( + + )} + + )} + ) } diff --git a/web/src/pages/ExecutionList/ExecutionList.module.scss b/web/src/pages/ExecutionList/ExecutionList.module.scss index 39284e82a..678e0a41b 100644 --- a/web/src/pages/ExecutionList/ExecutionList.module.scss +++ b/web/src/pages/ExecutionList/ExecutionList.module.scss @@ -59,28 +59,37 @@ padding: 6px 14px; } - .repoName { - font-weight: 600 !important; - font-size: 16px !important; - line-height: 24px !important; - color: var(--grey-800); - - .repoScope { - color: var(--grey-400); - padding: 2px 6px; - font-size: var(--font-size-xsmall) !important; - border-radius: 4px; - border: 1px solid var(--grey-200); - display: inline-block; - margin-left: var(--spacing-medium); - text-transform: uppercase; - line-height: 16px; - } + .number { + color: var(--grey-400) !important; + font-size: 0.875rem !important; + font-weight: 500 !important; } .desc { - color: var(--grey-500); - font-size: var(--font-size-small); - padding-top: var(--spacing-xsmall) !important; + color: 
var(--grey-800) !important; + font-size: 0.875rem !important; + font-weight: 600 !important; + } + + .author { + color: var(--grey500) !important; + font-size: 0.6875rem !important; + font-weight: 600 !important; + } + + .hash { + color: var(--primary-7) !important; + font-family: Roboto Mono !important; + font-size: 0.75rem; + font-weight: 500; + } + + .triggerLayout { + align-items: center !important; + } + + .divider { + color: var(--grey-300) !important; + font-size: 0.25rem !important; } } diff --git a/web/src/pages/ExecutionList/ExecutionList.module.scss.d.ts b/web/src/pages/ExecutionList/ExecutionList.module.scss.d.ts index bd85c3909..2f7763f6a 100644 --- a/web/src/pages/ExecutionList/ExecutionList.module.scss.d.ts +++ b/web/src/pages/ExecutionList/ExecutionList.module.scss.d.ts @@ -10,8 +10,11 @@ declare const styles: { readonly nameContainer: string readonly name: string readonly pinned: string - readonly repoName: string - readonly repoScope: string + readonly number: string readonly desc: string + readonly author: string + readonly hash: string + readonly triggerLayout: string + readonly divider: string } export default styles diff --git a/web/src/pages/ExecutionList/ExecutionList.tsx b/web/src/pages/ExecutionList/ExecutionList.tsx index 2df41ec93..04c9033e7 100644 --- a/web/src/pages/ExecutionList/ExecutionList.tsx +++ b/web/src/pages/ExecutionList/ExecutionList.tsx @@ -1,37 +1,40 @@ import React, { useMemo } from 'react' import { + Avatar, Button, ButtonVariation, Container, FlexExpander, Layout, PageBody, - PageHeader, TableV2 as Table, - Text + Text, + Utils } from '@harnessio/uicore' import { Color } from '@harnessio/design-system' import cx from 'classnames' import type { CellProps, Column } from 'react-table' import { useHistory, useParams } from 'react-router-dom' import { useGet } from 'restful-react' +import { Icon } from '@harnessio/icons' +import { Timer, Calendar } from 'iconoir-react' import { useStrings } from 'framework/strings' import { 
LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' import { useAppContext } from 'AppContext' import { NoResultCard } from 'components/NoResultCard/NoResultCard' -import { LIST_FETCHING_LIMIT, PageBrowserProps, formatDate, getErrorMessage, voidFn } from 'utils/Utils' +import { LIST_FETCHING_LIMIT, PageBrowserProps, getErrorMessage, timeDistance, voidFn } from 'utils/Utils' import type { CODEProps } from 'RouteDefinitions' import type { TypesExecution } from 'services/code' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' import { useQueryParams } from 'hooks/useQueryParams' import { usePageIndex } from 'hooks/usePageIndex' import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' import noExecutionImage from '../RepositoriesListing/no-repo.svg' import css from './ExecutionList.module.scss' const ExecutionList = () => { const { routes } = useAppContext() - const space = useGetSpaceParam() const { pipeline } = useParams() const history = useHistory() const { getString } = useStrings() @@ -39,15 +42,17 @@ const ExecutionList = () => { const pageInit = pageBrowser.page ? 
parseInt(pageBrowser.page) : 1 const [page, setPage] = usePageIndex(pageInit) + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + const { data: executions, - error, - loading, - refetch, + error: executionsError, + loading: executionsLoading, response } = useGet({ - path: `/api/v1/pipelines/${space}/${pipeline}/+/executions`, - queryParams: { page, limit: LIST_FETCHING_LIMIT } + path: `/api/v1/repos/${repoMetadata?.path}/+/pipelines/${pipeline}/executions`, + queryParams: { page, limit: LIST_FETCHING_LIMIT }, + lazy: !repoMetadata }) const NewExecutionButton = ( @@ -55,38 +60,60 @@ const ExecutionList = () => { text={getString('executions.newExecutionButton')} variation={ButtonVariation.PRIMARY} disabled={true} - icon="plus"> + icon="play-outline"> ) const columns: Column[] = useMemo( () => [ { - Header: getString('repos.name'), + Header: getString('executions.description'), width: 'calc(100% - 180px)', Cell: ({ row }: CellProps) => { const record = row.original return ( - - - {record.number} - {record.status && {record.status}} - - + + + {/* TODO this icon need to depend on the status */} + + {`#${record.number}.`} + {record.title} + + + + {/* TODO need logic here for different trigger types */} + {`${record.author_name} triggered manually`} + {`|`} + {/* TODO Will need to replace this with commit action - wont match Yifan designs */} + + {record.after} + + + ) } }, { - Header: getString('repos.updated'), + Header: getString('executions.time'), width: '180px', Cell: ({ row }: CellProps) => { + const record = row.original return ( - - - {formatDate(row.original.updated as number)} - - + + + + + {timeDistance(record.started, record.finished)} + + + + + + {timeDistance(record.finished, Date.now())} ago + + + ) }, disableSortBy: true @@ -97,10 +124,22 @@ const ExecutionList = () => { return ( - + executions?.length === 0, @@ -108,7 +147,7 @@ const ExecutionList = () => { message: getString('executions.noData'), button: NewExecutionButton 
}}> - + @@ -125,9 +164,9 @@ const ExecutionList = () => { onRowClick={executionInfo => history.push( routes.toCODEExecution({ - space, + repoPath: repoMetadata?.path as string, pipeline: pipeline as string, - execution: String(executionInfo.id) + execution: String(executionInfo.number) }) ) } diff --git a/web/src/pages/PipelineList/PipelineList.tsx b/web/src/pages/PipelineList/PipelineList.tsx index 28fd1f670..a2cb40d33 100644 --- a/web/src/pages/PipelineList/PipelineList.tsx +++ b/web/src/pages/PipelineList/PipelineList.tsx @@ -7,7 +7,6 @@ import { FlexExpander, Layout, PageBody, - PageHeader, TableV2 as Table, Text } from '@harnessio/uicore' @@ -22,19 +21,19 @@ import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' import { NoResultCard } from 'components/NoResultCard/NoResultCard' import { LIST_FETCHING_LIMIT, PageBrowserProps, formatDate, getErrorMessage, voidFn } from 'utils/Utils' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' import type { TypesPipeline } from 'services/code' import { useQueryParams } from 'hooks/useQueryParams' import { usePageIndex } from 'hooks/usePageIndex' import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' import { useAppContext } from 'AppContext' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' import noPipelineImage from '../RepositoriesListing/no-repo.svg' import css from './PipelineList.module.scss' import useNewPipelineModal from 'pages/NewPipeline/NewPipelineModal' const PipelineList = () => { const { routes } = useAppContext() - const space = useGetSpaceParam() const history = useHistory() const { getString } = useStrings() const [searchTerm, setSearchTerm] = useState() @@ -42,15 +41,17 @@ const PipelineList = () => { const 
pageInit = pageBrowser.page ? parseInt(pageBrowser.page) : 1 const [page, setPage] = usePageIndex(pageInit) + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + const { data: pipelines, - error, - loading, - refetch, + error: pipelinesError, + loading: pipelinesLoading, response } = useGet({ - path: `/api/v1/spaces/${space}/+/pipelines`, - queryParams: { page, limit: LIST_FETCHING_LIMIT, query: searchTerm } + path: `/api/v1/repos/${repoMetadata?.path}/+/pipelines`, + queryParams: { page, limit: LIST_FETCHING_LIMIT, query: searchTerm }, + lazy: !repoMetadata }) const { openModal } = useNewPipelineModal() @@ -145,10 +146,14 @@ const PipelineList = () => { return ( - + pipelines?.length === 0 && searchTerm === undefined, @@ -156,7 +161,10 @@ const PipelineList = () => { message: getString('pipelines.noData'), button: NewPipelineButton }}> - + @@ -172,7 +180,12 @@ const PipelineList = () => { columns={columns} data={pipelines || []} onRowClick={pipelineInfo => - history.push(routes.toCODEExecutions({ space, pipeline: pipelineInfo.uid as string })) + history.push( + routes.toCODEExecutions({ + repoPath: repoMetadata?.path as string, + pipeline: pipelineInfo.uid as string + }) + ) } getRowClassName={row => cx(css.row, !row.original.description && css.noDesc)} /> diff --git a/web/src/pages/Repository/RepositoryContent/FileContent/FileContent.tsx b/web/src/pages/Repository/RepositoryContent/FileContent/FileContent.tsx index cef781332..08a065234 100644 --- a/web/src/pages/Repository/RepositoryContent/FileContent/FileContent.tsx +++ b/web/src/pages/Repository/RepositoryContent/FileContent/FileContent.tsx @@ -35,7 +35,7 @@ import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButto import { PlainButton } from 'components/PlainButton/PlainButton' import { CommitsView } from 'components/CommitsView/CommitsView' import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { FileCategory, useFileContentViewerDecision } from 
'utils/FileUtils' +import { FileCategory, RepoContentExtended, useFileContentViewerDecision } from 'utils/FileUtils' import { useDownloadRawFile } from 'hooks/useDownloadRawFile' import { usePageIndex } from 'hooks/usePageIndex' import { Readme } from '../FolderContent/Readme' @@ -233,7 +233,12 @@ export function FileContent({ - + @@ -375,6 +380,18 @@ export function FileContent({ source={decodeGitContent(base64Data)} /> + + + + + + diff --git a/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss b/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss index de6d3865c..aa7b68da3 100644 --- a/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss +++ b/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss @@ -9,7 +9,7 @@ text-transform: none; color: var(--grey-400); font-weight: 500; - font-size: 13px; + font-size: 12px; } } @@ -24,7 +24,7 @@ margin-bottom: 0; .rowText { - font-size: 13px; + font-size: 12px; font-weight: 400; } } @@ -63,7 +63,7 @@ &, .text { - font-size: 13px; + font-size: 12px; font-weight: 400; cursor: pointer !important; } diff --git a/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.tsx b/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.tsx index 585842a2e..2ebe40a33 100644 --- a/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.tsx +++ b/web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.tsx @@ -9,10 +9,11 @@ import { TextProps, useIsMounted } from '@harnessio/uicore' -import { Icon } from '@harnessio/icons' import { Color } from '@harnessio/design-system' import cx from 'classnames' import type { CellProps, Column } from 'react-table' +import { Page } from 'iconoir-react' + import { Render } from 'react-jsx-match' import { chunk, sortBy, throttle } from 'lodash-es' import { useMutate } from 'restful-react' @@ -20,17 +21,30 @@ import { Link, 
useHistory } from 'react-router-dom' import { useAppContext } from 'AppContext' import type { OpenapiContentInfo, OpenapiDirContent, TypesCommit } from 'services/code' import { formatDate, isInViewport, LIST_FETCHING_LIMIT } from 'utils/Utils' -import { findReadmeInfo, CodeIcon, GitInfoProps, isFile } from 'utils/GitUtils' +import { findReadmeInfo, GitInfoProps, isFile, isSymlink, isSubmodule } from 'utils/GitUtils' import { LatestCommitForFolder } from 'components/LatestCommit/LatestCommit' import { CommitActions } from 'components/CommitActions/CommitActions' import { useEventListener } from 'hooks/useEventListener' import { Readme } from './Readme' import repositoryCSS from '../../Repository.module.scss' -import CodeFile from '../../../../icons/CodeFileFill.svg' +import CodeFolder from '../../../../icons/CodeFileFill.svg' +import Submodule from '../../../../icons/Submodules.svg' +import Symlink from '../../../../icons/Symlink.svg' import css from './FolderContent.module.scss' type FolderContentProps = Pick +const checkIcon = (row: OpenapiContentInfo): React.ReactElement => { + if (isFile(row)) { + return + } else if (isSymlink(row)) { + return + } else if (isSubmodule(row)) { + return + } else { + return + } +} export function FolderContent({ repoMetadata, resourceContent, gitRef }: FolderContentProps) { const history = useHistory() const { routes, standalone } = useAppContext() @@ -40,23 +54,25 @@ export function FolderContent({ repoMetadata, resourceContent, gitRef }: FolderC Header: 'Files', id: 'name', width: '30%', - Cell: ({ row }: CellProps) => ( - - - {isFile(row.original) ? 
: } - - - - ) + Cell: ({ row }: CellProps) => { + return ( + + + {checkIcon(row.original)} + + + + ) + } }, { Header: 'Date', @@ -81,7 +97,7 @@ export function FolderContent({ repoMetadata, resourceContent, gitRef }: FolderC ) } ], - [] // eslint-disable-line react-hooks/exhaustive-deps + [gitRef] // eslint-disable-line react-hooks/exhaustive-deps ) const readmeInfo = useMemo(() => findReadmeInfo(resourceContent), [resourceContent]) const scrollDOMElement = useMemo( diff --git a/web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.tsx b/web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.tsx index 5c03fd96d..28d94b802 100644 --- a/web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.tsx +++ b/web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useMemo, useState } from 'react' -import { Container, TableV2 as Table, Text, Avatar, Tag, useToaster } from '@harnessio/uicore' +import { Container, TableV2 as Table, Text, Avatar, Tag, useToaster, StringSubstitute } from '@harnessio/uicore' import { Color, Intent } from '@harnessio/design-system' import type { CellProps, Column } from 'react-table' import { Link, useHistory } from 'react-router-dom' @@ -128,7 +128,15 @@ export function BranchesContent({ repoMetadata, searchTerm = '', branches, onDel action: async () => { deleteBranch({}) .then(() => { - showSuccess(getString('branchDeleted', { branch: row.original.name }), 5000) + showSuccess( + , + 5000 + ) onDeleteSuccess() }) .catch(error => { diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx b/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx index ee939aabb..a832cae99 100644 --- a/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx +++ b/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx @@ -1,5 +1,5 @@ import React, { useMemo } from 
'react' -import { Container, TableV2 as Table, Text, Avatar, useToaster } from '@harnessio/uicore' +import { Container, TableV2 as Table, Text, Avatar, useToaster, StringSubstitute } from '@harnessio/uicore' import { Color, Intent } from '@harnessio/design-system' import type { CellProps, Column } from 'react-table' import { Link, useHistory } from 'react-router-dom' @@ -137,7 +137,15 @@ export function TagsContent({ repoMetadata, searchTerm = '', branches, onDeleteS action: async () => { deleteBranch({}) .then(() => { - showSuccess(getString('tagDeleted', { branch: row.original.name }), 5000) + showSuccess( + , + 5000 + ) onDeleteSuccess() }) .catch(error => { diff --git a/web/src/services/code/index.tsx b/web/src/services/code/index.tsx index d2fee9ae9..95a3c0ebf 100644 --- a/web/src/services/code/index.tsx +++ b/web/src/services/code/index.tsx @@ -45,8 +45,6 @@ export type EnumPullReqReviewerType = 'assigned' | 'requested' | 'self_assigned' export type EnumPullReqState = 'closed' | 'merged' | 'open' -export type EnumScmType = 'GITNESS' | 'GITHUB' | 'GITLAB' | 'UNKNOWN' - export type EnumTokenType = string export type EnumWebhookExecutionResult = 'fatal_error' | 'retriable_error' | 'success' | null @@ -173,6 +171,14 @@ export interface OpenapiCreateBranchRequest { target?: string } +export interface OpenapiCreateConnectorRequest { + data?: string + description?: string + space_ref?: string + type?: string + uid?: string +} + export interface OpenapiCreateExecutionRequest { status?: string } @@ -185,9 +191,6 @@ export interface OpenapiCreatePipelineRequest { config_path?: string default_branch?: string description?: string - repo_ref?: string - repo_type?: EnumScmType - space_ref?: string uid?: string } @@ -236,12 +239,25 @@ export interface OpenapiCreateTagRequest { target?: string } +export interface OpenapiCreateTemplateRequest { + data?: string + description?: string + space_ref?: string + type?: string + uid?: string +} + export interface 
OpenapiCreateTokenRequest { grants?: EnumAccessGrant lifetime?: TimeDuration uid?: string } +export interface OpenapiCreateTriggerRequest { + description?: string + uid?: string +} + export interface OpenapiCreateWebhookRequest { description?: string display_name?: string @@ -318,6 +334,12 @@ export interface OpenapiUpdateAdminRequest { admin?: boolean } +export interface OpenapiUpdateConnectorRequest { + data?: string + description?: string + uid?: string +} + export interface OpenapiUpdateExecutionRequest { status?: string } @@ -349,6 +371,17 @@ export interface OpenapiUpdateSpaceRequest { is_public?: boolean | null } +export interface OpenapiUpdateTemplateRequest { + data?: string + description?: string + uid?: string +} + +export interface OpenapiUpdateTriggerRequest { + description?: string + uid?: string +} + export interface OpenapiUpdateWebhookRequest { description?: string | null display_name?: string | null @@ -498,6 +531,17 @@ export interface TypesCommit { title?: string } +export interface TypesConnector { + created?: number + data?: string + description?: string + id?: number + space_id?: number + type?: string + uid?: string + updated?: number +} + export interface TypesDiffStats { commits?: number files_changed?: number @@ -592,15 +636,19 @@ export interface TypesPipeline { description?: string id?: number repo_id?: number - repo_name?: string - repo_type?: EnumScmType seq?: number - space_id?: number uid?: string updated?: number version?: number } +export interface TypesPlugin { + description?: string + logo?: string + spec?: string + uid?: string +} + export interface TypesPrincipalInfo { created?: number display_name?: string @@ -783,6 +831,16 @@ export interface TypesStep { stopped?: number } +export interface TypesTemplate { + created?: number + data?: string + description?: string + id?: number + space_id?: number + uid?: string + updated?: number +} + export interface TypesToken { created_by?: number expires_at?: number | null @@ -798,6 
+856,15 @@ export interface TypesTokenResponse { token?: TypesToken } +export interface TypesTrigger { + created?: number + description?: string + id?: number + pipeline_id?: number + uid?: string + updated?: number +} + export interface TypesUser { admin?: boolean blocked?: boolean @@ -1006,6 +1073,109 @@ export const useUpdateUserAdmin = ({ user_uid, ...props }: UseUpdateUserAdminPro { base: getConfig('code/api/v1'), pathParams: { user_uid }, ...props } ) +export type CreateConnectorProps = Omit< + MutateProps, + 'path' | 'verb' +> + +export const CreateConnector = (props: CreateConnectorProps) => ( + + verb="POST" + path={`/connectors`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseCreateConnectorProps = Omit< + UseMutateProps, + 'path' | 'verb' +> + +export const useCreateConnector = (props: UseCreateConnectorProps) => + useMutate('POST', `/connectors`, { + base: getConfig('code/api/v1'), + ...props + }) + +export type DeleteConnectorProps = Omit, 'path' | 'verb'> + +export const DeleteConnector = (props: DeleteConnectorProps) => ( + + verb="DELETE" + path={`/connectors`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseDeleteConnectorProps = Omit, 'path' | 'verb'> + +export const useDeleteConnector = (props: UseDeleteConnectorProps) => + useMutate('DELETE', `/connectors`, { + base: getConfig('code/api/v1'), + ...props + }) + +export interface FindConnectorPathParams { + connector_ref: string +} + +export type FindConnectorProps = Omit, 'path'> & + FindConnectorPathParams + +export const FindConnector = ({ connector_ref, ...props }: FindConnectorProps) => ( + + path={`/connectors/${connector_ref}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseFindConnectorProps = Omit< + UseGetProps, + 'path' +> & + FindConnectorPathParams + +export const useFindConnector = ({ connector_ref, ...props }: UseFindConnectorProps) => + useGet( + (paramsInPath: FindConnectorPathParams) => 
`/connectors/${paramsInPath.connector_ref}`, + { base: getConfig('code/api/v1'), pathParams: { connector_ref }, ...props } + ) + +export interface UpdateConnectorPathParams { + connector_ref: string +} + +export type UpdateConnectorProps = Omit< + MutateProps, + 'path' | 'verb' +> & + UpdateConnectorPathParams + +export const UpdateConnector = ({ connector_ref, ...props }: UpdateConnectorProps) => ( + + verb="PATCH" + path={`/connectors/${connector_ref}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseUpdateConnectorProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + UpdateConnectorPathParams + +export const useUpdateConnector = ({ connector_ref, ...props }: UseUpdateConnectorProps) => + useMutate( + 'PATCH', + (paramsInPath: UpdateConnectorPathParams) => `/connectors/${paramsInPath.connector_ref}`, + { base: getConfig('code/api/v1'), pathParams: { connector_ref }, ...props } + ) + export type OnLoginProps = Omit< MutateProps, 'path' | 'verb' @@ -1047,110 +1217,7 @@ export type UseOpLogoutProps = Omit useMutate('POST', `/logout`, { base: getConfig('code/api/v1'), ...props }) -export type CreatePipelineProps = Omit< - MutateProps, - 'path' | 'verb' -> - -export const CreatePipeline = (props: CreatePipelineProps) => ( - - verb="POST" - path={`/pipelines`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseCreatePipelineProps = Omit< - UseMutateProps, - 'path' | 'verb' -> - -export const useCreatePipeline = (props: UseCreatePipelineProps) => - useMutate('POST', `/pipelines`, { - base: getConfig('code/api/v1'), - ...props - }) - -export type DeletePipelineProps = Omit, 'path' | 'verb'> - -export const DeletePipeline = (props: DeletePipelineProps) => ( - - verb="DELETE" - path={`/pipelines`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseDeletePipelineProps = Omit, 'path' | 'verb'> - -export const useDeletePipeline = (props: UseDeletePipelineProps) => - useMutate('DELETE', `/pipelines`, { - 
base: getConfig('code/api/v1'), - ...props - }) - -export interface FindPipelinePathParams { - pipeline_ref: string -} - -export type FindPipelineProps = Omit, 'path'> & - FindPipelinePathParams - -export const FindPipeline = ({ pipeline_ref, ...props }: FindPipelineProps) => ( - - path={`/pipelines/${pipeline_ref}`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseFindPipelineProps = Omit< - UseGetProps, - 'path' -> & - FindPipelinePathParams - -export const useFindPipeline = ({ pipeline_ref, ...props }: UseFindPipelineProps) => - useGet( - (paramsInPath: FindPipelinePathParams) => `/pipelines/${paramsInPath.pipeline_ref}`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref }, ...props } - ) - -export interface UpdatePipelinePathParams { - pipeline_ref: string -} - -export type UpdatePipelineProps = Omit< - MutateProps, - 'path' | 'verb' -> & - UpdatePipelinePathParams - -export const UpdatePipeline = ({ pipeline_ref, ...props }: UpdatePipelineProps) => ( - - verb="PATCH" - path={`/pipelines/${pipeline_ref}`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseUpdatePipelineProps = Omit< - UseMutateProps, - 'path' | 'verb' -> & - UpdatePipelinePathParams - -export const useUpdatePipeline = ({ pipeline_ref, ...props }: UseUpdatePipelineProps) => - useMutate( - 'PATCH', - (paramsInPath: UpdatePipelinePathParams) => `/pipelines/${paramsInPath.pipeline_ref}`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref }, ...props } - ) - -export interface ListExecutionsQueryParams { +export interface ListPluginsQueryParams { /** * The page to return. */ @@ -1159,201 +1226,29 @@ export interface ListExecutionsQueryParams { * The maximum number of results to return. */ limit?: number + /** + * The substring which is used to filter the plugins by their name. 
+ */ + query?: string } -export interface ListExecutionsPathParams { - pipeline_ref: string -} +export type ListPluginsProps = Omit, 'path'> -export type ListExecutionsProps = Omit< - GetProps, - 'path' -> & - ListExecutionsPathParams - -export const ListExecutions = ({ pipeline_ref, ...props }: ListExecutionsProps) => ( - - path={`/pipelines/${pipeline_ref}/executions`} +export const ListPlugins = (props: ListPluginsProps) => ( + + path={`/plugins`} base={getConfig('code/api/v1')} {...props} /> ) -export type UseListExecutionsProps = Omit< - UseGetProps, - 'path' -> & - ListExecutionsPathParams +export type UseListPluginsProps = Omit, 'path'> -export const useListExecutions = ({ pipeline_ref, ...props }: UseListExecutionsProps) => - useGet( - (paramsInPath: ListExecutionsPathParams) => `/pipelines/${paramsInPath.pipeline_ref}/executions`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref }, ...props } - ) - -export interface CreateExecutionPathParams { - pipeline_ref: string -} - -export type CreateExecutionProps = Omit< - MutateProps, - 'path' | 'verb' -> & - CreateExecutionPathParams - -export const CreateExecution = ({ pipeline_ref, ...props }: CreateExecutionProps) => ( - - verb="POST" - path={`/pipelines/${pipeline_ref}/executions`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseCreateExecutionProps = Omit< - UseMutateProps, - 'path' | 'verb' -> & - CreateExecutionPathParams - -export const useCreateExecution = ({ pipeline_ref, ...props }: UseCreateExecutionProps) => - useMutate( - 'POST', - (paramsInPath: CreateExecutionPathParams) => `/pipelines/${paramsInPath.pipeline_ref}/executions`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref }, ...props } - ) - -export interface DeleteExecutionPathParams { - pipeline_ref: string -} - -export type DeleteExecutionProps = Omit< - MutateProps, - 'path' | 'verb' -> & - DeleteExecutionPathParams - -export const DeleteExecution = ({ pipeline_ref, ...props }: 
DeleteExecutionProps) => ( - - verb="DELETE" - path={`/pipelines/${pipeline_ref}/executions`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseDeleteExecutionProps = Omit< - UseMutateProps, - 'path' | 'verb' -> & - DeleteExecutionPathParams - -export const useDeleteExecution = ({ pipeline_ref, ...props }: UseDeleteExecutionProps) => - useMutate( - 'DELETE', - (paramsInPath: DeleteExecutionPathParams) => `/pipelines/${paramsInPath.pipeline_ref}/executions`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref }, ...props } - ) - -export interface FindExecutionPathParams { - pipeline_ref: string - execution_number: string -} - -export type FindExecutionProps = Omit, 'path'> & - FindExecutionPathParams - -export const FindExecution = ({ pipeline_ref, execution_number, ...props }: FindExecutionProps) => ( - - path={`/pipelines/${pipeline_ref}/executions/${execution_number}`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseFindExecutionProps = Omit< - UseGetProps, - 'path' -> & - FindExecutionPathParams - -export const useFindExecution = ({ pipeline_ref, execution_number, ...props }: UseFindExecutionProps) => - useGet( - (paramsInPath: FindExecutionPathParams) => - `/pipelines/${paramsInPath.pipeline_ref}/executions/${paramsInPath.execution_number}`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref, execution_number }, ...props } - ) - -export interface UpdateExecutionPathParams { - pipeline_ref: string - execution_number: string -} - -export type UpdateExecutionProps = Omit< - MutateProps, - 'path' | 'verb' -> & - UpdateExecutionPathParams - -export const UpdateExecution = ({ pipeline_ref, execution_number, ...props }: UpdateExecutionProps) => ( - - verb="PATCH" - path={`/pipelines/${pipeline_ref}/executions/${execution_number}`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseUpdateExecutionProps = Omit< - UseMutateProps, - 'path' | 'verb' -> & - UpdateExecutionPathParams 
- -export const useUpdateExecution = ({ pipeline_ref, execution_number, ...props }: UseUpdateExecutionProps) => - useMutate( - 'PATCH', - (paramsInPath: UpdateExecutionPathParams) => - `/pipelines/${paramsInPath.pipeline_ref}/executions/${paramsInPath.execution_number}`, - { base: getConfig('code/api/v1'), pathParams: { pipeline_ref, execution_number }, ...props } - ) - -export interface ViewLogsPathParams { - pipeline_ref: string - execution_number: string - stage_number: string - step_number: string -} - -export type ViewLogsProps = Omit, 'path'> & ViewLogsPathParams - -export const ViewLogs = ({ pipeline_ref, execution_number, stage_number, step_number, ...props }: ViewLogsProps) => ( - - path={`/pipelines/${pipeline_ref}/executions/${execution_number}/logs/${stage_number}/${step_number}`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseViewLogsProps = Omit, 'path'> & - ViewLogsPathParams - -export const useViewLogs = ({ - pipeline_ref, - execution_number, - stage_number, - step_number, - ...props -}: UseViewLogsProps) => - useGet( - (paramsInPath: ViewLogsPathParams) => - `/pipelines/${paramsInPath.pipeline_ref}/executions/${paramsInPath.execution_number}/logs/${paramsInPath.stage_number}/${paramsInPath.step_number}`, - { - base: getConfig('code/api/v1'), - pathParams: { pipeline_ref, execution_number, stage_number, step_number }, - ...props - } - ) +export const useListPlugins = (props: UseListPluginsProps) => + useGet(`/plugins`, { + base: getConfig('code/api/v1'), + ...props + }) export interface ListPrincipalsQueryParams { /** @@ -2331,6 +2226,576 @@ export const useDeleteRepositoryPath = ({ repo_ref, ...props }: UseDeleteReposit { base: getConfig('code/api/v1'), pathParams: { repo_ref }, ...props } ) +export interface ListPipelinesQueryParams { + /** + * The substring which is used to filter the repositories by their path name. + */ + query?: string + /** + * The page to return. 
+ */ + page?: number + /** + * The maximum number of results to return. + */ + limit?: number +} + +export interface ListPipelinesPathParams { + repo_ref: string +} + +export type ListPipelinesProps = Omit< + GetProps, + 'path' +> & + ListPipelinesPathParams + +export const ListPipelines = ({ repo_ref, ...props }: ListPipelinesProps) => ( + + path={`/repos/${repo_ref}/pipelines`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseListPipelinesProps = Omit< + UseGetProps, + 'path' +> & + ListPipelinesPathParams + +export const useListPipelines = ({ repo_ref, ...props }: UseListPipelinesProps) => + useGet( + (paramsInPath: ListPipelinesPathParams) => `/repos/${paramsInPath.repo_ref}/pipelines`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref }, ...props } + ) + +export interface CreatePipelinePathParams { + repo_ref: string +} + +export type CreatePipelineProps = Omit< + MutateProps, + 'path' | 'verb' +> & + CreatePipelinePathParams + +export const CreatePipeline = ({ repo_ref, ...props }: CreatePipelineProps) => ( + + verb="POST" + path={`/repos/${repo_ref}/pipelines`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseCreatePipelineProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + CreatePipelinePathParams + +export const useCreatePipeline = ({ repo_ref, ...props }: UseCreatePipelineProps) => + useMutate( + 'POST', + (paramsInPath: CreatePipelinePathParams) => `/repos/${paramsInPath.repo_ref}/pipelines`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref }, ...props } + ) + +export interface DeletePipelinePathParams { + repo_ref: string +} + +export type DeletePipelineProps = Omit< + MutateProps, + 'path' | 'verb' +> & + DeletePipelinePathParams + +export const DeletePipeline = ({ repo_ref, ...props }: DeletePipelineProps) => ( + + verb="DELETE" + path={`/repos/${repo_ref}/pipelines`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseDeletePipelineProps = Omit< + 
UseMutateProps, + 'path' | 'verb' +> & + DeletePipelinePathParams + +export const useDeletePipeline = ({ repo_ref, ...props }: UseDeletePipelineProps) => + useMutate( + 'DELETE', + (paramsInPath: DeletePipelinePathParams) => `/repos/${paramsInPath.repo_ref}/pipelines`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref }, ...props } + ) + +export interface FindPipelinePathParams { + repo_ref: string + pipeline_uid: string +} + +export type FindPipelineProps = Omit, 'path'> & + FindPipelinePathParams + +export const FindPipeline = ({ repo_ref, pipeline_uid, ...props }: FindPipelineProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseFindPipelineProps = Omit< + UseGetProps, + 'path' +> & + FindPipelinePathParams + +export const useFindPipeline = ({ repo_ref, pipeline_uid, ...props }: UseFindPipelineProps) => + useGet( + (paramsInPath: FindPipelinePathParams) => `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface UpdatePipelinePathParams { + repo_ref: string + pipeline_uid: string +} + +export type UpdatePipelineProps = Omit< + MutateProps, + 'path' | 'verb' +> & + UpdatePipelinePathParams + +export const UpdatePipeline = ({ repo_ref, pipeline_uid, ...props }: UpdatePipelineProps) => ( + + verb="PATCH" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseUpdatePipelineProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + UpdatePipelinePathParams + +export const useUpdatePipeline = ({ repo_ref, pipeline_uid, ...props }: UseUpdatePipelineProps) => + useMutate( + 'PATCH', + (paramsInPath: UpdatePipelinePathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, 
...props } + ) + +export interface ListExecutionsQueryParams { + /** + * The page to return. + */ + page?: number + /** + * The maximum number of results to return. + */ + limit?: number +} + +export interface ListExecutionsPathParams { + repo_ref: string + pipeline_uid: string +} + +export type ListExecutionsProps = Omit< + GetProps, + 'path' +> & + ListExecutionsPathParams + +export const ListExecutions = ({ repo_ref, pipeline_uid, ...props }: ListExecutionsProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseListExecutionsProps = Omit< + UseGetProps, + 'path' +> & + ListExecutionsPathParams + +export const useListExecutions = ({ repo_ref, pipeline_uid, ...props }: UseListExecutionsProps) => + useGet( + (paramsInPath: ListExecutionsPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface CreateExecutionPathParams { + repo_ref: string + pipeline_uid: string +} + +export type CreateExecutionProps = Omit< + MutateProps, + 'path' | 'verb' +> & + CreateExecutionPathParams + +export const CreateExecution = ({ repo_ref, pipeline_uid, ...props }: CreateExecutionProps) => ( + + verb="POST" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseCreateExecutionProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + CreateExecutionPathParams + +export const useCreateExecution = ({ repo_ref, pipeline_uid, ...props }: UseCreateExecutionProps) => + useMutate( + 'POST', + (paramsInPath: CreateExecutionPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface DeleteExecutionPathParams { + 
repo_ref: string + pipeline_uid: string +} + +export type DeleteExecutionProps = Omit< + MutateProps, + 'path' | 'verb' +> & + DeleteExecutionPathParams + +export const DeleteExecution = ({ repo_ref, pipeline_uid, ...props }: DeleteExecutionProps) => ( + + verb="DELETE" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseDeleteExecutionProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + DeleteExecutionPathParams + +export const useDeleteExecution = ({ repo_ref, pipeline_uid, ...props }: UseDeleteExecutionProps) => + useMutate( + 'DELETE', + (paramsInPath: DeleteExecutionPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface FindExecutionPathParams { + repo_ref: string + pipeline_uid: string + execution_number: string +} + +export type FindExecutionProps = Omit, 'path'> & + FindExecutionPathParams + +export const FindExecution = ({ repo_ref, pipeline_uid, execution_number, ...props }: FindExecutionProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions/${execution_number}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseFindExecutionProps = Omit< + UseGetProps, + 'path' +> & + FindExecutionPathParams + +export const useFindExecution = ({ repo_ref, pipeline_uid, execution_number, ...props }: UseFindExecutionProps) => + useGet( + (paramsInPath: FindExecutionPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions/${paramsInPath.execution_number}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid, execution_number }, ...props } + ) + +export interface UpdateExecutionPathParams { + repo_ref: string + pipeline_uid: string + execution_number: string +} + +export type UpdateExecutionProps = Omit< + MutateProps, + 
'path' | 'verb' +> & + UpdateExecutionPathParams + +export const UpdateExecution = ({ repo_ref, pipeline_uid, execution_number, ...props }: UpdateExecutionProps) => ( + + verb="PATCH" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions/${execution_number}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseUpdateExecutionProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + UpdateExecutionPathParams + +export const useUpdateExecution = ({ repo_ref, pipeline_uid, execution_number, ...props }: UseUpdateExecutionProps) => + useMutate( + 'PATCH', + (paramsInPath: UpdateExecutionPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions/${paramsInPath.execution_number}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid, execution_number }, ...props } + ) + +export interface ViewLogsPathParams { + repo_ref: string + pipeline_uid: string + execution_number: string + stage_number: string + step_number: string +} + +export type ViewLogsProps = Omit, 'path'> & ViewLogsPathParams + +export const ViewLogs = ({ + repo_ref, + pipeline_uid, + execution_number, + stage_number, + step_number, + ...props +}: ViewLogsProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/executions/${execution_number}/logs/${stage_number}/${step_number}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseViewLogsProps = Omit, 'path'> & + ViewLogsPathParams + +export const useViewLogs = ({ + repo_ref, + pipeline_uid, + execution_number, + stage_number, + step_number, + ...props +}: UseViewLogsProps) => + useGet( + (paramsInPath: ViewLogsPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/executions/${paramsInPath.execution_number}/logs/${paramsInPath.stage_number}/${paramsInPath.step_number}`, + { + base: getConfig('code/api/v1'), + pathParams: { repo_ref, pipeline_uid, execution_number, stage_number, step_number }, + ...props 
+ } + ) + +export interface ListTriggersQueryParams { + /** + * The substring which is used to filter the repositories by their path name. + */ + query?: string + /** + * The page to return. + */ + page?: number + /** + * The maximum number of results to return. + */ + limit?: number +} + +export interface ListTriggersPathParams { + repo_ref: string + pipeline_uid: string +} + +export type ListTriggersProps = Omit< + GetProps, + 'path' +> & + ListTriggersPathParams + +export const ListTriggers = ({ repo_ref, pipeline_uid, ...props }: ListTriggersProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/triggers`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseListTriggersProps = Omit< + UseGetProps, + 'path' +> & + ListTriggersPathParams + +export const useListTriggers = ({ repo_ref, pipeline_uid, ...props }: UseListTriggersProps) => + useGet( + (paramsInPath: ListTriggersPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/triggers`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface CreateTriggerPathParams { + repo_ref: string + pipeline_uid: string +} + +export type CreateTriggerProps = Omit< + MutateProps, + 'path' | 'verb' +> & + CreateTriggerPathParams + +export const CreateTrigger = ({ repo_ref, pipeline_uid, ...props }: CreateTriggerProps) => ( + + verb="POST" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/triggers`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseCreateTriggerProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + CreateTriggerPathParams + +export const useCreateTrigger = ({ repo_ref, pipeline_uid, ...props }: UseCreateTriggerProps) => + useMutate( + 'POST', + (paramsInPath: CreateTriggerPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/triggers`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + 
+export interface DeleteTriggerPathParams { + repo_ref: string + pipeline_uid: string +} + +export type DeleteTriggerProps = Omit< + MutateProps, + 'path' | 'verb' +> & + DeleteTriggerPathParams + +export const DeleteTrigger = ({ repo_ref, pipeline_uid, ...props }: DeleteTriggerProps) => ( + + verb="DELETE" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/triggers`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseDeleteTriggerProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + DeleteTriggerPathParams + +export const useDeleteTrigger = ({ repo_ref, pipeline_uid, ...props }: UseDeleteTriggerProps) => + useMutate( + 'DELETE', + (paramsInPath: DeleteTriggerPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/triggers`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid }, ...props } + ) + +export interface FindTriggerPathParams { + repo_ref: string + pipeline_uid: string + trigger_uid: string +} + +export type FindTriggerProps = Omit, 'path'> & + FindTriggerPathParams + +export const FindTrigger = ({ repo_ref, pipeline_uid, trigger_uid, ...props }: FindTriggerProps) => ( + + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/triggers/${trigger_uid}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseFindTriggerProps = Omit, 'path'> & + FindTriggerPathParams + +export const useFindTrigger = ({ repo_ref, pipeline_uid, trigger_uid, ...props }: UseFindTriggerProps) => + useGet( + (paramsInPath: FindTriggerPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/triggers/${paramsInPath.trigger_uid}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid, trigger_uid }, ...props } + ) + +export interface UpdateTriggerPathParams { + repo_ref: string + pipeline_uid: string + trigger_uid: string +} + +export type UpdateTriggerProps = Omit< + MutateProps, + 'path' | 'verb' +> & + UpdateTriggerPathParams + +export 
const UpdateTrigger = ({ repo_ref, pipeline_uid, trigger_uid, ...props }: UpdateTriggerProps) => ( + + verb="PATCH" + path={`/repos/${repo_ref}/pipelines/${pipeline_uid}/triggers/${trigger_uid}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseUpdateTriggerProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + UpdateTriggerPathParams + +export const useUpdateTrigger = ({ repo_ref, pipeline_uid, trigger_uid, ...props }: UseUpdateTriggerProps) => + useMutate( + 'PATCH', + (paramsInPath: UpdateTriggerPathParams) => + `/repos/${paramsInPath.repo_ref}/pipelines/${paramsInPath.pipeline_uid}/triggers/${paramsInPath.trigger_uid}`, + { base: getConfig('code/api/v1'), pathParams: { repo_ref, pipeline_uid, trigger_uid }, ...props } + ) + export interface ListPullReqQueryParams { /** * The state of the pull requests to include in the result. @@ -3777,6 +4242,51 @@ export const useUpdateSpace = ({ space_ref, ...props }: UseUpdateSpaceProps) => { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } ) +export interface ListConnectorsQueryParams { + /** + * The substring which is used to filter the repositories by their path name. + */ + query?: string + /** + * The page to return. + */ + page?: number + /** + * The maximum number of results to return. 
+ */ + limit?: number +} + +export interface ListConnectorsPathParams { + space_ref: string +} + +export type ListConnectorsProps = Omit< + GetProps, + 'path' +> & + ListConnectorsPathParams + +export const ListConnectors = ({ space_ref, ...props }: ListConnectorsProps) => ( + + path={`/spaces/${space_ref}/connectors`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseListConnectorsProps = Omit< + UseGetProps, + 'path' +> & + ListConnectorsPathParams + +export const useListConnectors = ({ space_ref, ...props }: UseListConnectorsProps) => + useGet( + (paramsInPath: ListConnectorsPathParams) => `/spaces/${paramsInPath.space_ref}/connectors`, + { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } + ) + export interface MembershipListQueryParams { /** * The substring by which the space members are filtered. @@ -4073,51 +4583,6 @@ export const useDeletePath = ({ space_ref, ...props }: UseDeletePathProps) => { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } ) -export interface ListPipelinesQueryParams { - /** - * The substring which is used to filter the repositories by their path name. - */ - query?: string - /** - * The page to return. - */ - page?: number - /** - * The maximum number of results to return. 
- */ - limit?: number -} - -export interface ListPipelinesPathParams { - space_ref: string -} - -export type ListPipelinesProps = Omit< - GetProps, - 'path' -> & - ListPipelinesPathParams - -export const ListPipelines = ({ space_ref, ...props }: ListPipelinesProps) => ( - - path={`/spaces/${space_ref}/pipelines`} - base={getConfig('code/api/v1')} - {...props} - /> -) - -export type UseListPipelinesProps = Omit< - UseGetProps, - 'path' -> & - ListPipelinesPathParams - -export const useListPipelines = ({ space_ref, ...props }: UseListPipelinesProps) => - useGet( - (paramsInPath: ListPipelinesPathParams) => `/spaces/${paramsInPath.space_ref}/pipelines`, - { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } - ) - export interface ListReposQueryParams { /** * The substring which is used to filter the repositories by their path name. @@ -4299,6 +4764,51 @@ export const useListSpaces = ({ space_ref, ...props }: UseListSpacesProps) => { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } ) +export interface ListTemplatesQueryParams { + /** + * The substring which is used to filter the repositories by their path name. + */ + query?: string + /** + * The page to return. + */ + page?: number + /** + * The maximum number of results to return. 
+ */ + limit?: number +} + +export interface ListTemplatesPathParams { + space_ref: string +} + +export type ListTemplatesProps = Omit< + GetProps, + 'path' +> & + ListTemplatesPathParams + +export const ListTemplates = ({ space_ref, ...props }: ListTemplatesProps) => ( + + path={`/spaces/${space_ref}/templates`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseListTemplatesProps = Omit< + UseGetProps, + 'path' +> & + ListTemplatesPathParams + +export const useListTemplates = ({ space_ref, ...props }: UseListTemplatesProps) => + useGet( + (paramsInPath: ListTemplatesPathParams) => `/spaces/${paramsInPath.space_ref}/templates`, + { base: getConfig('code/api/v1'), pathParams: { space_ref }, ...props } + ) + export type GetSystemConfigProps = Omit, 'path'> export const GetSystemConfig = (props: GetSystemConfigProps) => ( @@ -4314,6 +4824,109 @@ export type UseGetSystemConfigProps = Omit useGet(`/system/config`, { base: getConfig('code/api/v1'), ...props }) +export type CreateTemplateProps = Omit< + MutateProps, + 'path' | 'verb' +> + +export const CreateTemplate = (props: CreateTemplateProps) => ( + + verb="POST" + path={`/templates`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseCreateTemplateProps = Omit< + UseMutateProps, + 'path' | 'verb' +> + +export const useCreateTemplate = (props: UseCreateTemplateProps) => + useMutate('POST', `/templates`, { + base: getConfig('code/api/v1'), + ...props + }) + +export type DeleteTemplateProps = Omit, 'path' | 'verb'> + +export const DeleteTemplate = (props: DeleteTemplateProps) => ( + + verb="DELETE" + path={`/templates`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseDeleteTemplateProps = Omit, 'path' | 'verb'> + +export const useDeleteTemplate = (props: UseDeleteTemplateProps) => + useMutate('DELETE', `/templates`, { + base: getConfig('code/api/v1'), + ...props + }) + +export interface FindTemplatePathParams { + template_ref: string +} + +export 
type FindTemplateProps = Omit, 'path'> & + FindTemplatePathParams + +export const FindTemplate = ({ template_ref, ...props }: FindTemplateProps) => ( + + path={`/templates/${template_ref}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseFindTemplateProps = Omit< + UseGetProps, + 'path' +> & + FindTemplatePathParams + +export const useFindTemplate = ({ template_ref, ...props }: UseFindTemplateProps) => + useGet( + (paramsInPath: FindTemplatePathParams) => `/templates/${paramsInPath.template_ref}`, + { base: getConfig('code/api/v1'), pathParams: { template_ref }, ...props } + ) + +export interface UpdateTemplatePathParams { + template_ref: string +} + +export type UpdateTemplateProps = Omit< + MutateProps, + 'path' | 'verb' +> & + UpdateTemplatePathParams + +export const UpdateTemplate = ({ template_ref, ...props }: UpdateTemplateProps) => ( + + verb="PATCH" + path={`/templates/${template_ref}`} + base={getConfig('code/api/v1')} + {...props} + /> +) + +export type UseUpdateTemplateProps = Omit< + UseMutateProps, + 'path' | 'verb' +> & + UpdateTemplatePathParams + +export const useUpdateTemplate = ({ template_ref, ...props }: UseUpdateTemplateProps) => + useMutate( + 'PATCH', + (paramsInPath: UpdateTemplatePathParams) => `/templates/${paramsInPath.template_ref}`, + { base: getConfig('code/api/v1'), pathParams: { template_ref }, ...props } + ) + export type GetUserProps = Omit, 'path'> export const GetUser = (props: GetUserProps) => ( @@ -4347,20 +4960,49 @@ export const useUpdateUser = (props: UseUpdateUserProps) => ...props }) -export type MembershipSpacesProps = Omit, 'path'> +export interface MembershipSpacesQueryParams { + /** + * The substring by which the spaces the users is a member of are filtered. + */ + query?: string + /** + * The order of the output. + */ + order?: 'asc' | 'desc' + /** + * The field by which the spaces the user is a member of are sorted. + */ + sort?: 'created' | 'path' | 'uid' + /** + * The page to return. 
+ */ + page?: number + /** + * The maximum number of results to return. + */ + limit?: number +} + +export type MembershipSpacesProps = Omit< + GetProps, + 'path' +> export const MembershipSpaces = (props: MembershipSpacesProps) => ( - + path={`/user/memberships`} base={getConfig('code/api/v1')} {...props} /> ) -export type UseMembershipSpacesProps = Omit, 'path'> +export type UseMembershipSpacesProps = Omit< + UseGetProps, + 'path' +> export const useMembershipSpaces = (props: UseMembershipSpacesProps) => - useGet(`/user/memberships`, { + useGet(`/user/memberships`, { base: getConfig('code/api/v1'), ...props }) diff --git a/web/src/services/code/swagger.yaml b/web/src/services/code/swagger.yaml index 2aa94c5bd..f4d555cb1 100644 --- a/web/src/services/code/swagger.yaml +++ b/web/src/services/code/swagger.yaml @@ -236,6 +236,178 @@ paths: description: Internal Server Error tags: - admin + /connectors: + post: + operationId: createConnector + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiCreateConnectorRequest' + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesConnector' + description: Created + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - connector + /connectors/{connector_ref}: + delete: + operationId: deleteConnector + parameters: + - in: path + name: connector_ref + required: true + schema: + type: string + responses: + '204': + description: No Content + '401': + content: + application/json: + schema: + $ref: 
'#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - connector + get: + operationId: findConnector + parameters: + - in: path + name: connector_ref + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesConnector' + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - connector + patch: + operationId: updateConnector + parameters: + - in: path + name: connector_ref + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiUpdateConnectorRequest' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesConnector' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + 
content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - connector /login: post: operationId: onLogin @@ -297,181 +469,9 @@ paths: description: Internal Server Error tags: - account - /pipelines: - post: - operationId: createPipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/OpenapiCreatePipelineRequest' - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesPipeline' - description: Created - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Bad Request - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - /pipelines/{pipeline_ref}: - delete: - operationId: deletePipeline - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - responses: - '204': - description: No Content - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline + /plugins: get: - operationId: findPipeline - parameters: - - in: path 
- name: pipeline_ref - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesPipeline' - description: OK - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - patch: - operationId: updatePipeline - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/OpenapiUpdatePipelineRequest' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesPipeline' - description: OK - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Bad Request - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - /pipelines/{pipeline_ref}/executions: - get: - operationId: listExecutions + operationId: listPlugins parameters: - description: The page to return. 
in: query @@ -490,9 +490,10 @@ paths: maximum: 100 minimum: 1 type: integer - - in: path - name: pipeline_ref - required: true + - description: The substring which is used to filter the plugins by their name. + in: query + name: query + required: false schema: type: string responses: @@ -501,7 +502,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/TypesExecution' + $ref: '#/components/schemas/TypesPlugin' type: array description: OK '401': @@ -529,256 +530,7 @@ paths: $ref: '#/components/schemas/UsererrorError' description: Internal Server Error tags: - - pipeline - post: - operationId: createExecution - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/OpenapiCreateExecutionRequest' - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesExecution' - description: Created - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Bad Request - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - /pipelines/{pipeline_ref}/executions/{execution_number}: - delete: - operationId: deleteExecution - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - - in: path - name: execution_number - required: true - schema: - type: string - responses: - '204': - description: No Content - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: 
'#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - get: - operationId: findExecution - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - - in: path - name: execution_number - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesExecution' - description: OK - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - patch: - operationId: updateExecution - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - - in: path - name: execution_number - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/OpenapiUpdateExecutionRequest' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/TypesExecution' - description: OK - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Bad Request - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - 
description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline - /pipelines/{pipeline_ref}/executions/{execution_number}/logs/{stage_number}/{step_number}: - get: - operationId: viewLogs - parameters: - - in: path - name: pipeline_ref - required: true - schema: - type: string - - in: path - name: execution_number - required: true - schema: - type: string - - in: path - name: stage_number - required: true - schema: - type: string - - in: path - name: step_number - required: true - schema: - type: string - responses: - '200': - content: - text/plain: - schema: - type: string - description: OK - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - pipeline + - plugins /principals: get: operationId: listPrincipals @@ -2191,6 +1943,891 @@ paths: description: Internal Server Error tags: - repository + /repos/{repo_ref}/pipelines: + get: + operationId: listPipelines + parameters: + - description: The substring which is used to filter the repositories by their + path name. + in: query + name: query + required: false + schema: + type: string + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. 
+ in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer + - in: path + name: repo_ref + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + items: + $ref: '#/components/schemas/TypesPipeline' + type: array + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - repos + post: + operationId: createPipeline + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiCreatePipelineRequest' + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesPipeline' + description: Created + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}: + delete: + operationId: deletePipeline + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + 
responses: + '204': + description: No Content + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + get: + operationId: findPipeline + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesPipeline' + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + patch: + operationId: updatePipeline + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiUpdatePipelineRequest' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesPipeline' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + 
application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}/executions: + get: + operationId: listExecutions + parameters: + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. + in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + items: + $ref: '#/components/schemas/TypesExecution' + type: array + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + post: + operationId: createExecution + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: 
'#/components/schemas/OpenapiCreateExecutionRequest' + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesExecution' + description: Created + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}: + delete: + operationId: deleteExecution + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: execution_number + required: true + schema: + type: string + responses: + '204': + description: No Content + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + get: + operationId: findExecution + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: execution_number + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: 
'#/components/schemas/TypesExecution' + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + patch: + operationId: updateExecution + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: execution_number + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiUpdateExecutionRequest' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesExecution' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/logs/{stage_number}/{step_number}: + get: + operationId: viewLogs + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: 
pipeline_uid + required: true + schema: + type: string + - in: path + name: execution_number + required: true + schema: + type: string + - in: path + name: stage_number + required: true + schema: + type: string + - in: path + name: step_number + required: true + schema: + type: string + responses: + '200': + content: + text/plain: + schema: + type: string + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}/triggers: + get: + operationId: listTriggers + parameters: + - description: The substring which is used to filter the repositories by their + path name. + in: query + name: query + required: false + schema: + type: string + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. 
+ in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + items: + $ref: '#/components/schemas/TypesTrigger' + type: array + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + post: + operationId: createTrigger + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiCreateTriggerRequest' + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTrigger' + description: Created + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + /repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}: + delete: + operationId: 
deleteTrigger + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: trigger_uid + required: true + schema: + type: string + responses: + '204': + description: No Content + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + get: + operationId: findTrigger + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: trigger_uid + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTrigger' + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline + patch: + operationId: updateTrigger + parameters: + - in: path + name: repo_ref + required: true + schema: + type: string + - in: path + name: pipeline_uid + required: true + schema: + type: string + - in: path + name: trigger_uid + required: true + schema: + 
type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiUpdateTriggerRequest' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTrigger' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - pipeline /repos/{repo_ref}/pullreq: get: operationId: listPullReq @@ -4324,6 +4961,74 @@ paths: description: Internal Server Error tags: - space + /spaces/{space_ref}/connectors: + get: + operationId: listConnectors + parameters: + - description: The substring which is used to filter the repositories by their + path name. + in: query + name: query + required: false + schema: + type: string + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. 
+ in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer + - in: path + name: space_ref + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + items: + $ref: '#/components/schemas/TypesConnector' + type: array + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - space /spaces/{space_ref}/members: get: operationId: membershipList @@ -4756,74 +5461,6 @@ paths: description: Internal Server Error tags: - space - /spaces/{space_ref}/pipelines: - get: - operationId: listPipelines - parameters: - - description: The substring which is used to filter the repositories by their - path name. - in: query - name: query - required: false - schema: - type: string - - description: The page to return. - in: query - name: page - required: false - schema: - default: 1 - minimum: 1 - type: integer - - description: The maximum number of results to return. 
- in: query - name: limit - required: false - schema: - default: 30 - maximum: 100 - minimum: 1 - type: integer - - in: path - name: space_ref - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - items: - $ref: '#/components/schemas/TypesPipeline' - type: array - description: OK - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Not Found - '500': - content: - application/json: - schema: - $ref: '#/components/schemas/UsererrorError' - description: Internal Server Error - tags: - - space /spaces/{space_ref}/repos: get: operationId: listRepos @@ -5116,6 +5753,74 @@ paths: description: Internal Server Error tags: - space + /spaces/{space_ref}/templates: + get: + operationId: listTemplates + parameters: + - description: The substring which is used to filter the repositories by their + path name. + in: query + name: query + required: false + schema: + type: string + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. 
+ in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer + - in: path + name: space_ref + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + items: + $ref: '#/components/schemas/TypesTemplate' + type: array + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - space /system/config: get: operationId: getSystemConfig @@ -5140,6 +5845,178 @@ paths: description: Internal Server Error tags: - system + /templates: + post: + operationId: createTemplate + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiCreateTemplateRequest' + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTemplate' + description: Created + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - template + /templates/{template_ref}: + delete: + operationId: deleteTemplate + parameters: + - in: path + name: template_ref + required: true + schema: + type: string + responses: + '204': + description: 
No Content + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - template + get: + operationId: findTemplate + parameters: + - in: path + name: template_ref + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTemplate' + description: OK + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - template + patch: + operationId: updateTemplate + parameters: + - in: path + name: template_ref + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenapiUpdateTemplateRequest' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TypesTemplate' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: 
'#/components/schemas/UsererrorError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Not Found + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/UsererrorError' + description: Internal Server Error + tags: + - template /user: get: operationId: getUser @@ -5183,6 +6060,52 @@ paths: /user/memberships: get: operationId: membershipSpaces + parameters: + - description: The substring by which the spaces the users is a member of are + filtered. + in: query + name: query + required: false + schema: + type: string + - description: The order of the output. + in: query + name: order + required: false + schema: + default: asc + enum: + - asc + - desc + type: string + - description: The field by which the spaces the user is a member of are sorted. + in: query + name: sort + required: false + schema: + default: uid + enum: + - created + - path + - uid + type: string + - description: The page to return. + in: query + name: page + required: false + schema: + default: 1 + minimum: 1 + type: integer + - description: The maximum number of results to return. 
+ in: query + name: limit + required: false + schema: + default: 30 + maximum: 100 + minimum: 1 + type: integer responses: '200': content: @@ -5315,13 +6238,6 @@ components: - merged - open type: string - EnumScmType: - enum: - - GITNESS - - GITHUB - - GITLAB - - UNKNOWN - type: string EnumTokenType: type: string EnumWebhookExecutionResult: @@ -5546,6 +6462,19 @@ components: target: type: string type: object + OpenapiCreateConnectorRequest: + properties: + data: + type: string + description: + type: string + space_ref: + type: string + type: + type: string + uid: + type: string + type: object OpenapiCreateExecutionRequest: properties: status: @@ -5564,12 +6493,6 @@ components: type: string description: type: string - repo_ref: - type: string - repo_type: - $ref: '#/components/schemas/EnumScmType' - space_ref: - type: string uid: type: string type: object @@ -5645,6 +6568,19 @@ components: target: type: string type: object + OpenapiCreateTemplateRequest: + properties: + data: + type: string + description: + type: string + space_ref: + type: string + type: + type: string + uid: + type: string + type: object OpenapiCreateTokenRequest: properties: grants: @@ -5654,6 +6590,13 @@ components: uid: type: string type: object + OpenapiCreateTriggerRequest: + properties: + description: + type: string + uid: + type: string + type: object OpenapiCreateWebhookRequest: properties: description: @@ -5780,6 +6723,15 @@ components: admin: type: boolean type: object + OpenapiUpdateConnectorRequest: + properties: + data: + type: string + description: + type: string + uid: + type: string + type: object OpenapiUpdateExecutionRequest: properties: status: @@ -5828,6 +6780,22 @@ components: nullable: true type: boolean type: object + OpenapiUpdateTemplateRequest: + properties: + data: + type: string + description: + type: string + uid: + type: string + type: object + OpenapiUpdateTriggerRequest: + properties: + description: + type: string + uid: + type: string + type: object 
OpenapiUpdateWebhookRequest: properties: description: @@ -6076,6 +7044,25 @@ components: title: type: string type: object + TypesConnector: + properties: + created: + type: integer + data: + type: string + description: + type: string + id: + type: integer + space_id: + type: integer + type: + type: string + uid: + type: string + updated: + type: integer + type: object TypesDiffStats: properties: commits: @@ -6248,14 +7235,8 @@ components: type: integer repo_id: type: integer - repo_name: - type: string - repo_type: - $ref: '#/components/schemas/EnumScmType' seq: type: integer - space_id: - type: integer uid: type: string updated: @@ -6263,6 +7244,17 @@ components: version: type: integer type: object + TypesPlugin: + properties: + description: + type: string + logo: + type: string + spec: + type: string + uid: + type: string + type: object TypesPrincipalInfo: properties: created: @@ -6604,6 +7596,23 @@ components: stopped: type: integer type: object + TypesTemplate: + properties: + created: + type: integer + data: + type: string + description: + type: string + id: + type: integer + space_id: + type: integer + uid: + type: string + updated: + type: integer + type: object TypesToken: properties: created_by: @@ -6629,6 +7638,21 @@ components: token: $ref: '#/components/schemas/TypesToken' type: object + TypesTrigger: + properties: + created: + type: integer + description: + type: string + id: + type: integer + pipeline_id: + type: integer + uid: + type: string + updated: + type: integer + type: object TypesUser: properties: admin: diff --git a/web/src/utils/FileUtils.ts b/web/src/utils/FileUtils.ts index 397f45ff1..9d846b9bd 100644 --- a/web/src/utils/FileUtils.ts +++ b/web/src/utils/FileUtils.ts @@ -22,6 +22,13 @@ interface UseFileViewerDecisionResult { isText: boolean } +export interface RepoContentExtended extends RepoFileContent { + size?: number + target?: string + commit_sha?: string + url?: string +} + export function useFileContentViewerDecision({ repoMetadata, 
gitRef, @@ -32,6 +39,9 @@ export function useFileContentViewerDecision({ const metadata = useMemo(() => { const filename = resourceContent.name as string const extension = filename?.split('.').pop() || '' + const isSymlink = resourceContent?.type === 'symlink' + const isSubmodule = resourceContent?.type === 'submodule' + const isMarkdown = extension.toLowerCase() === 'md' const isPdf = extension.toLowerCase() === 'pdf' const isSVG = extension.toLowerCase() === 'svg' @@ -40,7 +50,9 @@ export function useFileContentViewerDecision({ const isVideo = VideoExtensions.includes(extension.toLowerCase()) const isText = !!( SpecialTextFiles.find(name => name.toLowerCase() === filename?.toLowerCase()) || - TextExtensions.includes(extension.toLowerCase()) + TextExtensions.includes(extension.toLowerCase()) || + isSymlink || + isSubmodule ) const category = isMarkdown ? FileCategory.MARKDOWN @@ -54,12 +66,17 @@ export function useFileContentViewerDecision({ ? FileCategory.AUDIO : isVideo ? FileCategory.VIDEO + : isSymlink + ? FileCategory.SYMLINK + : isSubmodule + ? FileCategory.SUBMODULE : isText ? FileCategory.TEXT : FileCategory.OTHER - const isViewable = isPdf || isSVG || isImage || isAudio || isVideo || isText - const resourceData = resourceContent?.content as RepoFileContent - const isFileTooLarge = resourceData?.size !== resourceData?.data_size + const isViewable = isPdf || isSVG || isImage || isAudio || isVideo || isText || isSubmodule || isSymlink + const resourceData = resourceContent?.content as RepoContentExtended + const isFileTooLarge = + resourceData?.size && resourceData?.data_size ? resourceData?.size !== resourceData?.data_size : false const rawURL = `/code/api/v1/repos/${repoMetadata?.path}/+/raw/${resourcePath}?routingId=${routingId}&git_ref=${gitRef}` return { category, @@ -73,7 +90,8 @@ export function useFileContentViewerDecision({ size: resourceData?.size || 0, // base64 data returned from content API. 
This snapshot can be truncated by backend - base64Data: resourceData?.data || '', + base64Data: resourceData?.data || resourceData?.target || resourceData?.url || '', + rawURL } }, [resourceContent.content]) // eslint-disable-line react-hooks/exhaustive-deps @@ -91,6 +109,8 @@ export enum FileCategory { AUDIO = 'AUDIO', VIDEO = 'VIDEO', TEXT = 'TEXT', + SYMLINK = 'SYMLINK', + SUBMODULE = 'SUBMODULE', OTHER = 'OTHER' } diff --git a/web/src/utils/Utils.ts b/web/src/utils/Utils.ts index 5e9b1f876..a30be074d 100644 --- a/web/src/utils/Utils.ts +++ b/web/src/utils/Utils.ts @@ -112,12 +112,17 @@ export const timeDistance = (date1 = 0, date2 = 0) => { return '' } + const days = Math.floor(distance / (24 * 3600000)) // 24 hours * 60 minutes * 60 seconds * 1000 milliseconds + distance -= days * 24 * 3600000 const hours = Math.floor(distance / 3600000) distance -= hours * 3600000 const minutes = Math.floor(distance / 60000) distance -= minutes * 60000 const seconds = Math.floor(distance / 1000) - return `${hours ? hours + 'h ' : ''}${minutes ? minutes + 'm' : hours ? '0m' : ''} ${seconds}s` + + return `${days ? days + 'd ' : ''}${hours ? hours + 'h ' : ''}${ + minutes ? minutes + 'm' : hours || days ? '0m' : '' + } ${seconds}s` } const LOCALE = Intl.NumberFormat().resolvedOptions?.().locale || 'en-US'