diff --git a/engine/api/api_routes.go b/engine/api/api_routes.go index 8ee544c605..caddc39c14 100644 --- a/engine/api/api_routes.go +++ b/engine/api/api_routes.go @@ -408,6 +408,8 @@ func (api *API) InitRouter() { r.Handle("/group/{permGroupName}/worker/model", Scope(sdk.AuthConsumerScopeWorkerModel), r.GET(api.getWorkerModelsForGroupHandler)) // Workflows + + r.Handle("/workflow/search", Scope(sdk.AuthConsumerScopeProject), r.GET(api.getSearchWorkflowHandler)) r.Handle("/workflow/hook", Scope(sdk.AuthConsumerScopeHooks), r.GET(api.getWorkflowHooksHandler)) r.Handle("/workflow/hook/model/{model}", ScopeNone(), r.GET(api.getWorkflowHookModelHandler), r.POST(api.postWorkflowHookModelHandler, NeedAdmin(true)), r.PUT(api.putWorkflowHookModelHandler, NeedAdmin(true))) diff --git a/engine/api/application.go b/engine/api/application.go index fca89a4f52..77075fcb37 100644 --- a/engine/api/application.go +++ b/engine/api/application.go @@ -161,8 +161,6 @@ func loadApplicationUsage(ctx context.Context, db gorp.SqlExecutor, projKey, app } usage.Workflows = wf - // TODO: add usage of pipelines and environments - return usage, nil } diff --git a/engine/api/application/dao.go b/engine/api/application/dao.go index 83d5c4d620..238650933a 100644 --- a/engine/api/application/dao.go +++ b/engine/api/application/dao.go @@ -5,6 +5,8 @@ import ( "database/sql" "time" + "github.com/lib/pq" + "github.com/go-gorp/gorp" "github.com/ovh/cds/engine/api/database/gorpmapping" @@ -238,6 +240,16 @@ func LoadAll(db gorp.SqlExecutor, key string, opts ...LoadOptionFunc) ([]sdk.App return getAll(context.Background(), db, opts, query) } +// LoadAllByIDs returns all applications +func LoadAllByIDs(db gorp.SqlExecutor, ids []int64, opts ...LoadOptionFunc) ([]sdk.Application, error) { + query := gorpmapping.NewQuery(` + SELECT application.* + FROM application + WHERE application.id = ANY($1) + ORDER BY application.name ASC`).Args(pq.Int64Array(ids)) + return getAll(context.Background(), db, opts, query) +} + // LoadAllNames returns all application names func LoadAllNames(db gorp.SqlExecutor, projID int64) (sdk.IDNames, error) { query := ` diff --git a/engine/api/ascode/dao.go b/engine/api/ascode/dao.go index a841cad928..88781c785e 100644 --- a/engine/api/ascode/dao.go +++ b/engine/api/ascode/dao.go @@ -4,6 +4,8 @@ import ( "context" "database/sql" + "github.com/lib/pq" + "github.com/go-gorp/gorp" "github.com/ovh/cds/engine/api/database/gorpmapping" @@ -28,6 +30,20 @@ func LoadEventByWorkflowIDAndPullRequest(ctx context.Context, db gorp.SqlExecuto return &event, nil } +func LoadEventsByWorkflowIDs(ctx context.Context, db gorp.SqlExecutor, workflowIDs []int64) ([]sdk.AsCodeEvent, error) { + query := gorpmapping.NewQuery("SELECT * FROM as_code_events where workflow_id = ANY($1)").Args(pq.Int64Array(workflowIDs)) + var events []dbAsCodeEvents + if err := gorpmapping.GetAll(ctx, db, query, &events); err != nil { + return nil, sdk.WrapError(err, "Unable to load as code events") + } + + asCodeEvents := make([]sdk.AsCodeEvent, len(events)) + for i := range events { + asCodeEvents[i] = sdk.AsCodeEvent(events[i]) + } + return asCodeEvents, nil +} + // LoadEventsByWorkflowID returns as code events for the given workflow. 
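Note on the batching idiom used by the new helpers above (application.LoadAllByIDs, ascode.LoadEventsByWorkflowIDs): the id slice is bound as a pq.Int64Array and matched with `= ANY($1)`, so a single query replaces N per-id lookups. A minimal caller sketch follows; the `example` package and the loadAppsByID wrapper are illustrative, only application.LoadAllByIDs and its signature come from the hunks above.

    package example

    import (
        "github.com/go-gorp/gorp"

        "github.com/ovh/cds/engine/api/application"
        "github.com/ovh/cds/sdk"
    )

    // loadAppsByID batch-loads a set of applications with one query and
    // indexes them by id, instead of issuing one query per application.
    func loadAppsByID(db gorp.SqlExecutor, ids []int64) (map[int64]sdk.Application, error) {
        apps, err := application.LoadAllByIDs(db, ids)
        if err != nil {
            return nil, err
        }
        byID := make(map[int64]sdk.Application, len(apps))
        for i := range apps {
            byID[apps[i].ID] = apps[i]
        }
        return byID, nil
    }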
func LoadEventsByWorkflowID(ctx context.Context, db gorp.SqlExecutor, workflowID int64) ([]sdk.AsCodeEvent, error) { query := gorpmapping.NewQuery(` diff --git a/engine/api/environment/environment.go b/engine/api/environment/environment.go index c3af4735cc..f488b2b474 100644 --- a/engine/api/environment/environment.go +++ b/engine/api/environment/environment.go @@ -12,6 +12,45 @@ import ( "github.com/ovh/cds/sdk" ) +// LoadAllByIDs load all environment +func LoadAllByIDs(db gorp.SqlExecutor, ids []int64) ([]sdk.Environment, error) { + var envs []sdk.Environment + + query := `SELECT environment.id, environment.name, environment.last_modified, environment.from_repository, project.projectkey + FROM environment + JOIN project ON project.id = environment.project_id + WHERE environment.id = ANY($1) + ORDER by environment.name` + rows, err := db.Query(query, pq.Int64Array(ids)) + if err != nil { + if err == sql.ErrNoRows { + return envs, sdk.WithStack(sdk.ErrNoEnvironment) + } + return envs, sdk.WithStack(err) + } + defer rows.Close() + + for rows.Next() { + var env sdk.Environment + var lastModified time.Time + var projectKey string + if err := rows.Scan(&env.ID, &env.Name, &lastModified, &env.FromRepository, &projectKey); err != nil { + return envs, sdk.WithStack(err) + } + env.LastModified = lastModified.Unix() + env.ProjectKey = projectKey + envs = append(envs, env) + } + rows.Close() + + for i := range envs { + if err := loadDependencies(db, &envs[i]); err != nil { + return envs, err + } + } + return envs, nil +} + // LoadEnvironments load all environment from the given project func LoadEnvironments(db gorp.SqlExecutor, projectKey string) ([]sdk.Environment, error) { var envs []sdk.Environment diff --git a/engine/api/group/gorp_model.go b/engine/api/group/gorp_model.go index 0e28bd380e..80a2541b2d 100644 --- a/engine/api/group/gorp_model.go +++ b/engine/api/group/gorp_model.go @@ -99,5 +99,6 @@ func init() { gorpmapping.New(group{}, "group", true, "id"), gorpmapping.New(LinkGroupUser{}, "group_authentified_user", true, "id"), gorpmapping.New(LinkGroupProject{}, "project_group", true, "id"), + gorpmapping.New(LinkWorkflowGroupPermission{}, "workflow_perm", false), ) } diff --git a/engine/api/group/node_group.go b/engine/api/group/node_group.go index 86ff2971ad..f6affd088c 100644 --- a/engine/api/group/node_group.go +++ b/engine/api/group/node_group.go @@ -5,20 +5,22 @@ import ( "strings" "github.com/go-gorp/gorp" + "github.com/lib/pq" + "github.com/ovh/cds/sdk" ) // LoadGroupsByNode retrieves all groups related to a node -func LoadGroupsByNode(db gorp.SqlExecutor, nodeID int64) ([]sdk.GroupPermission, error) { - query := `SELECT "group".id,"group".name,workflow_node_group.role +func LoadGroupsByNode(db gorp.SqlExecutor, nodeID []int64) (map[int64][]sdk.GroupPermission, error) { + query := `SELECT workflow_node_id, "group".id,"group".name,workflow_node_group.role FROM "group" - JOIN project_group ON "group".id = project_group.group_id - JOIN workflow_perm ON workflow_perm.project_group_id = project_group.id - JOIN workflow_node_group ON workflow_node_group.workflow_group_id = workflow_perm.id - WHERE workflow_node_group.workflow_node_id = $1 + JOIN project_group ON "group".id = project_group.group_id + JOIN workflow_perm ON workflow_perm.project_group_id = project_group.id + JOIN workflow_node_group ON workflow_node_group.workflow_group_id = workflow_perm.id + WHERE workflow_node_group.workflow_node_id = ANY($1) ORDER BY "group".name ASC` - rows, err := db.Query(query, nodeID) + rows, err := 
db.Query(query, pq.Int64Array(nodeID)) if err != nil { if err == sql.ErrNoRows { return nil, nil @@ -27,19 +29,23 @@ func LoadGroupsByNode(db gorp.SqlExecutor, nodeID int64) ([]sdk.GroupPermission, } defer rows.Close() - var groups []sdk.GroupPermission + var mapGroups = make(map[int64][]sdk.GroupPermission) for rows.Next() { var group sdk.Group var perm int - if err := rows.Scan(&group.ID, &group.Name, &perm); err != nil { - return groups, sdk.WithStack(err) + var nodeID int64 + if err := rows.Scan(&nodeID, &group.ID, &group.Name, &perm); err != nil { + return nil, sdk.WithStack(err) } + + var groups = mapGroups[nodeID] groups = append(groups, sdk.GroupPermission{ Group: group, Permission: perm, }) + mapGroups[nodeID] = groups } - return groups, nil + return mapGroups, nil } // InsertGroupsInNode Link the given groups and the given environment diff --git a/engine/api/group/workflow_group.go b/engine/api/group/workflow_group.go index a56dd21461..5838507f5a 100644 --- a/engine/api/group/workflow_group.go +++ b/engine/api/group/workflow_group.go @@ -6,7 +6,9 @@ import ( "strings" "github.com/go-gorp/gorp" + "github.com/lib/pq" + "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/sdk" ) @@ -148,14 +150,52 @@ func checkAtLeastOneGroupWithWriteRoleOnWorkflow(db gorp.SqlExecutor, wID int64) return nb > 0, err } +type LinkWorkflowGroupPermission struct { + WorkflowID int64 `db:"workflow_id"` + GroupID int64 `db:"group_id"` + GroupName string `db:"group_name"` + Role int `db:"role"` +} + +// LoadWorkflowGroupsByWorkflowIDs returns a map with key: workflowID and value the slite of groups +func LoadWorkflowGroupsByWorkflowIDs(db gorp.SqlExecutor, workflowIDs []int64) (map[int64][]sdk.GroupPermission, error) { + result := make(map[int64][]sdk.GroupPermission, len(workflowIDs)) + query := gorpmapping.NewQuery(` + SELECT workflow_perm.workflow_id, "group".id as "group_id", "group".name as "group_name", workflow_perm.role + FROM "group" + JOIN project_group ON project_group.group_id = "group".id + JOIN workflow_perm ON workflow_perm.project_group_id = project_group.id + WHERE workflow_perm.workflow_id = ANY($1) + ORDER BY workflow_perm.workflow_id, "group".name ASC + `).Args(pq.Int64Array(workflowIDs)) + var dbResultSet = []LinkWorkflowGroupPermission{} + if err := gorpmapping.GetAll(context.Background(), db, query, &dbResultSet); err != nil { + return nil, err + } + + for _, row := range dbResultSet { + perms := result[row.WorkflowID] + perms = append(perms, sdk.GroupPermission{ + Permission: row.Role, + Group: sdk.Group{ + ID: row.GroupID, + Name: row.GroupName, + }, + }) + result[row.WorkflowID] = perms + } + + return result, nil +} + // LoadWorkflowGroups load groups for a workflow func LoadWorkflowGroups(db gorp.SqlExecutor, workflowID int64) ([]sdk.GroupPermission, error) { wgs := []sdk.GroupPermission{} query := `SELECT "group".id, "group".name, workflow_perm.role FROM "group" - JOIN project_group ON project_group.group_id = "group".id - JOIN workflow_perm ON workflow_perm.project_group_id = project_group.id + JOIN project_group ON project_group.group_id = "group".id + JOIN workflow_perm ON workflow_perm.project_group_id = project_group.id WHERE workflow_perm.workflow_id = $1 ORDER BY "group".name ASC` rows, errq := db.Query(query, workflowID) diff --git a/engine/api/integration/dao_project_integration.go b/engine/api/integration/dao_project_integration.go index ea846fce13..4d08bb9e0d 100644 --- a/engine/api/integration/dao_project_integration.go +++ 
b/engine/api/integration/dao_project_integration.go @@ -3,6 +3,8 @@ package integration import ( "context" + "github.com/lib/pq" + "github.com/go-gorp/gorp" "github.com/ovh/cds/engine/api/database/gorpmapping" @@ -153,6 +155,12 @@ func LoadIntegrationsByProjectID(db gorp.SqlExecutor, id int64) ([]sdk.ProjectIn return loadAll(db, query) } +// LoadIntegrationsByIDs load integration integrations by id +func LoadIntegrationsByIDs(db gorp.SqlExecutor, ids []int64) ([]sdk.ProjectIntegration, error) { + query := gorpmapping.NewQuery("SELECT * from project_integration WHERE id = ANY($1)").Args(pq.Int64Array(ids)) + return loadAll(db, query) +} + // InsertIntegration inserts a integration func InsertIntegration(db gorp.SqlExecutor, pp *sdk.ProjectIntegration) error { oldConfig := pp.Config.Clone() diff --git a/engine/api/notification/user.go b/engine/api/notification/user.go index 637cb63d55..76767fb533 100644 --- a/engine/api/notification/user.go +++ b/engine/api/notification/user.go @@ -120,6 +120,7 @@ func GetUserWorkflowEvents(ctx context.Context, db gorp.SqlExecutor, store cache if err != nil { log.Error(ctx, "notification.GetUserWorkflowEvents> unable to handle event %+v: %v", jn, err) } + log.Debug("GetUserWorkflowEvents> will send mail notifications: %+v", notif) go sendMailNotif(ctx, notif) } } diff --git a/engine/api/permission/dao.go b/engine/api/permission/dao.go index 9ee51ac9a6..3a4c9ca72c 100644 --- a/engine/api/permission/dao.go +++ b/engine/api/permission/dao.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/go-gorp/gorp" + "github.com/lib/pq" "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/engine/api/group" @@ -13,6 +14,33 @@ import ( "github.com/ovh/cds/sdk" ) +func LoadWorkflowMaxLevelPermissionByWorkflowIDs(ctx context.Context, db gorp.SqlExecutor, workflowIDs []int64, groupIDs []int64) (sdk.EntitiesPermissions, error) { + _, end := observability.Span(ctx, "permission.LoadWorkflowMaxLevelPermissionByWorkflowIDs") + defer end() + + query := ` + SELECT workflow.id::text, max(workflow_perm.role) + FROM workflow_perm + JOIN workflow ON workflow.id = workflow_perm.workflow_id + JOIN project ON project.id = workflow.project_id + JOIN project_group ON project_group.id = workflow_perm.project_group_id + WHERE project_group.project_id = project.id + AND workflow.id = ANY($1) + AND project_group.group_id = ANY($2) + GROUP BY workflow.id, workflow.name` + + rows, err := db.Query(query, pq.Int64Array(workflowIDs), pq.Int64Array(groupIDs)) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, sdk.WithStack(err) + } + defer rows.Close() + + return scanPermissions(rows) +} + func LoadWorkflowMaxLevelPermission(ctx context.Context, db gorp.SqlExecutor, projectKey string, workflowNames []string, groupIDs []int64) (sdk.EntitiesPermissions, error) { _, end := observability.Span(ctx, "permission.LoadWorkflowMaxLevelPermission") defer end() diff --git a/engine/api/pipeline/pipeline.go b/engine/api/pipeline/pipeline.go index 3b3a31b78c..0800de3b00 100644 --- a/engine/api/pipeline/pipeline.go +++ b/engine/api/pipeline/pipeline.go @@ -7,6 +7,7 @@ import ( "time" "github.com/go-gorp/gorp" + "github.com/lib/pq" "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/engine/api/group" @@ -239,6 +240,37 @@ func DeletePipeline(ctx context.Context, db gorp.SqlExecutor, pipelineID int64) return nil } +// LoadAllByIDs loads all pipelines +func LoadAllByIDs(db gorp.SqlExecutor, ids []int64, loadDependencies bool) ([]sdk.Pipeline, error) { + 
var pips []sdk.Pipeline + query := `SELECT id, name, description, project_id, last_modified, from_repository + FROM pipeline + WHERE id = ANY($1) + ORDER BY pipeline.name` + + if _, err := db.Select(&pips, query, pq.Int64Array(ids)); err != nil { + if err == sql.ErrNoRows { + return nil, nil + } + return nil, sdk.WithStack(err) + } + + for i := range pips { + if loadDependencies { + if err := LoadPipelineStage(context.TODO(), db, &pips[i]); err != nil { + return nil, err + } + } + params, err := GetAllParametersInPipeline(context.TODO(), db, pips[i].ID) + if err != nil { + return nil, err + } + pips[i].Parameter = params + } + + return pips, nil +} + // LoadPipelines loads all pipelines in a project func LoadPipelines(db gorp.SqlExecutor, projectID int64, loadDependencies bool) ([]sdk.Pipeline, error) { var pips []sdk.Pipeline diff --git a/engine/api/repositories_manager.go b/engine/api/repositories_manager.go index 3d80923242..c4c2a022f0 100644 --- a/engine/api/repositories_manager.go +++ b/engine/api/repositories_manager.go @@ -557,7 +557,7 @@ func (api *API) attachRepositoriesManagerHandler() service.Handler { wfDB.WorkflowData.Node.Context.DefaultPayload = defaultPayload if err := workflow.Update(ctx, db, api.Cache, *proj, wfDB, workflow.UpdateOptions{DisableHookManagement: true}); err != nil { - return sdk.WrapError(err, "cannot update node context %d", wf.WorkflowData.Node.Context.ID) + return sdk.WrapError(err, "cannot update node context %d", wfDB.WorkflowData.Node.Context.ID) } event.PublishWorkflowUpdate(ctx, proj.Key, *wfDB, *wfOld, getAPIConsumer(ctx)) diff --git a/engine/api/repositories_manager_test.go b/engine/api/repositories_manager_test.go index bc25f5d548..8df7860a2b 100644 --- a/engine/api/repositories_manager_test.go +++ b/engine/api/repositories_manager_test.go @@ -219,16 +219,21 @@ vcs_ssh_key: proj-blabla PurgeTags: []string{"git.branch"}, } + t.Log("Inserting workflow=====") + test.NoError(t, workflow.Insert(context.TODO(), db, api.Cache, *proj, &w)) w1, err := workflow.Load(context.TODO(), db, api.Cache, *proj, "test_1", workflow.LoadOptions{ DeepPipeline: true, }) test.NoError(t, err) + t.Log("Inserting workflow run=====") + // creates a run wr, errWR := workflow.CreateRun(db, w1, nil, u) assert.NoError(t, errWR) wr.Workflow = *w1 + t.Log("Starting workflow run=====") _, errWr := workflow.StartWorkflowRun(context.TODO(), db, api.Cache, *proj, wr, &sdk.WorkflowRunPostHandlerOption{ Manual: &sdk.WorkflowNodeRunManual{ Username: u.Username, @@ -245,6 +250,8 @@ vcs_ssh_key: proj-blabla "applicationName": app.Name, } + t.Log("Trying to detach=====") + uri := router.GetRoute("POST", api.detachRepositoriesManagerHandler, vars) req, err := http.NewRequest("POST", uri, nil) @@ -257,6 +264,8 @@ vcs_ssh_key: proj-blabla // as there is one repository webhook attached, 403 is expected assert.Equal(t, 403, rw.Code) + t.Log("Loading the workflow=====") + w2, err := workflow.Load(context.TODO(), db, api.Cache, *proj, "test_1", workflow.LoadOptions{}) test.NoError(t, err) @@ -269,7 +278,8 @@ vcs_ssh_key: proj-blabla } w2.WorkflowData.Node.Hooks = append(w2.WorkflowData.Node.Hooks[:index], w2.WorkflowData.Node.Hooks[index+1:]...) 
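For context on the map-shaped results introduced earlier (group.LoadWorkflowGroupsByWorkflowIDs in workflow_group.go returns map[int64][]sdk.GroupPermission keyed by workflow id): the intended consumption is to load once for all ids, then fan the result back out over the slice. The attachGroups helper below is an illustrative sketch, not code from this change.

    package example

    import (
        "github.com/go-gorp/gorp"

        "github.com/ovh/cds/engine/api/group"
        "github.com/ovh/cds/sdk"
    )

    // attachGroups loads the group permissions of every workflow in one query
    // and assigns each slice back to its workflow.
    func attachGroups(db gorp.SqlExecutor, wfs []sdk.Workflow) error {
        ids := make([]int64, len(wfs))
        for i := range wfs {
            ids[i] = wfs[i].ID
        }
        groupsByWorkflowID, err := group.LoadWorkflowGroupsByWorkflowIDs(db, ids)
        if err != nil {
            return err
        }
        for i := range wfs {
            wfs[i].Groups = groupsByWorkflowID[wfs[i].ID]
        }
        return nil
    }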
- // save the workflow with the repositorywebhok deleted + // save the workflow with the repositorywebhook deleted + t.Log("Updating the workflo without the repositorywebhook=====") test.NoError(t, workflow.Update(context.TODO(), db, api.Cache, *proj, w2, workflow.UpdateOptions{})) req, err = http.NewRequest("POST", uri, nil) diff --git a/engine/api/router_middleware_auth.go b/engine/api/router_middleware_auth.go index 076c83e96b..9125643195 100644 --- a/engine/api/router_middleware_auth.go +++ b/engine/api/router_middleware_auth.go @@ -178,8 +178,6 @@ func (api *API) jwtMiddleware(ctx context.Context, w http.ResponseWriter, req *h ctx, end := observability.Span(ctx, "router.jwtMiddleware") defer end() - log.Debug("jwtMiddleware> try to find a jwt token in cookie or header") - var jwtRaw string var jwtFromCookie bool // Try to get the jwt from the cookie firstly then from the authorization bearer header, a XSRF token with cookie diff --git a/engine/api/templates.go b/engine/api/templates.go index d00c495339..bd307dc936 100644 --- a/engine/api/templates.go +++ b/engine/api/templates.go @@ -1014,7 +1014,7 @@ func (api *API) getTemplateUsageHandler() service.Handler { mProjectIDs[ps[i].ID] = struct{}{} } - filteredWorkflow := []sdk.Workflow{} + filteredWorkflow := []sdk.WorkflowName{} for i := range wfs { if _, ok := mProjectIDs[wfs[i].ProjectID]; ok { filteredWorkflow = append(filteredWorkflow, wfs[i]) diff --git a/engine/api/timeline.go b/engine/api/timeline.go index 82db12ae3a..98225f287d 100644 --- a/engine/api/timeline.go +++ b/engine/api/timeline.go @@ -44,7 +44,7 @@ func (api *API) getTimelineHandler() service.Handler { return err } - ws, err := workflow.LoadAllByProjectIDs(ctx, api.mustDB(), sdk.ProjectsToIDs(ps)) + ws, err := workflow.LoadAllNamesByProjectIDs(ctx, api.mustDB(), sdk.ProjectsToIDs(ps)) if err != nil { return err } diff --git a/engine/api/workflow.go b/engine/api/workflow.go index 87723441bc..6b8eca0fd0 100644 --- a/engine/api/workflow.go +++ b/engine/api/workflow.go @@ -12,6 +12,7 @@ import ( "github.com/go-gorp/gorp" "github.com/gorilla/mux" + "github.com/ovh/cds/engine/api/application" "github.com/ovh/cds/engine/api/environment" "github.com/ovh/cds/engine/api/event" @@ -31,38 +32,33 @@ import ( func (api *API) getWorkflowsHandler() service.Handler { return func(ctx context.Context, w http.ResponseWriter, r *http.Request) error { vars := mux.Vars(r) - key := vars[permProjectKey] + filterByProject := vars[permProjectKey] filterByRepo := r.FormValue("repo") - ws, err := workflow.LoadAll(api.mustDB(), key) - if err != nil { - return err + var dao workflow.WorkflowDAO + if filterByProject != "" { + dao.Filters.ProjectKey = filterByProject } if filterByRepo != "" { - mapApps := make(map[int64]sdk.Application) - apps, err := application.LoadAll(api.mustDB(), key) - if err != nil { - return err - } + dao.Filters.ApplicationRepository = filterByRepo + } - for _, app := range apps { - mapApps[app.ID] = app - } + dao.Loaders.WithFavoritesForUserID = getAPIConsumer(ctx).AuthentifiedUserID + + groupIDS := getAPIConsumer(ctx).GetGroupIDs() + dao.Filters.GroupIDs = groupIDS + if isMaintainer(ctx) { + dao.Filters.GroupIDs = nil + } - ws = ws.Filter( - func(w sdk.Workflow) bool { - if w.WorkflowData.Node.Context != nil { - app, _ := mapApps[w.WorkflowData.Node.Context.ApplicationID] - return app.RepositoryFullname == filterByRepo - } - return false - }, - ) + ws, err := dao.LoadAll(ctx, api.mustDBWithCtx(ctx)) + if err != nil { + return err } - names := ws.Names() - perms, err := 
permission.LoadWorkflowMaxLevelPermission(ctx, api.mustDB(), key, names, getAPIConsumer(ctx).GetGroupIDs()) + ids := ws.IDs() + perms, err := permission.LoadWorkflowMaxLevelPermissionByWorkflowIDs(ctx, api.mustDB(), ids, groupIDS) if err != nil { return err } @@ -71,11 +67,16 @@ func (api *API) getWorkflowsHandler() service.Handler { if isAdmin(ctx) { ws[i].Permissions = sdk.Permissions{Readable: true, Writable: true, Executable: true} } else { - ws[i].Permissions = perms.Permissions(ws[i].Name) + idString := strconv.FormatInt(ws[i].ID, 10) + ws[i].Permissions = perms.Permissions(idString) if isMaintainer(ctx) { ws[i].Permissions.Readable = true } } + + w1 := &ws[i] + w1.URLs.APIURL = api.Config.URL.API + api.Router.GetRoute("GET", api.getWorkflowHandler, map[string]string{"key": w1.ProjectKey, "permWorkflowName": w1.Name}) + w1.URLs.UIURL = api.Config.URL.UI + "/project/" + w1.ProjectKey + "/workflow/" + w1.Name } return service.WriteJSON(w, ws, http.StatusOK) @@ -103,24 +104,20 @@ func (api *API) getWorkflowHandler() service.Handler { } opts := workflow.LoadOptions{ - Minimal: minimal, // if true, load only data from table workflow, not pipelines, app, env... - DeepPipeline: withDeepPipelines, - WithIcon: !withoutIcons, - WithLabels: withLabels, - WithAsCodeUpdateEvent: withAsCodeEvents, - WithIntegrations: true, - WithTemplate: withTemplate, + Minimal: minimal, // if true, load only data from table workflow, not pipelines, app, env... + DeepPipeline: withDeepPipelines, + WithIcon: !withoutIcons, + WithLabels: withLabels, + WithAsCodeUpdateEvent: withAsCodeEvents, + WithIntegrations: true, + WithTemplate: withTemplate, + WithFavoritesForUserID: getAPIConsumer(ctx).AuthentifiedUserID, } w1, err := workflow.Load(ctx, api.mustDB(), api.Cache, *proj, name, opts) if err != nil { return sdk.WrapError(err, "cannot load workflow %s", name) } - w1.Favorite, err = workflow.IsFavorite(api.mustDB(), w1, getAPIConsumer(ctx).AuthentifiedUserID) - if err != nil { - return err - } - if withUsage { usage, err := loadWorkflowUsage(api.mustDB(), w1.ID) if err != nil { @@ -285,7 +282,7 @@ func (api *API) postWorkflowLabelHandler() service.Handler { return sdk.NewErrorFrom(sdk.ErrWrongRequest, "label ID or label name should not be empty") } - lbl, err := project.LabelByName(db, proj.ID, label.Name) + lbl, err := project.LabelByName(tx, proj.ID, label.Name) if err != nil { if sdk.Cause(err) != sql.ErrNoRows { return sdk.WrapError(err, "cannot load label by name") @@ -299,7 +296,7 @@ func (api *API) postWorkflowLabelHandler() service.Handler { } } - wf, err := workflow.Load(ctx, db, api.Cache, *proj, workflowName, workflow.LoadOptions{WithLabels: true}) + wf, err := workflow.Load(ctx, tx, api.Cache, *proj, workflowName, workflow.LoadOptions{WithLabels: true}) if err != nil { return sdk.WrapError(err, "cannot load workflow %s/%s", key, workflowName) } @@ -605,26 +602,37 @@ func (api *API) deleteWorkflowHandler() service.Handler { consumer := getAPIConsumer(ctx) sdk.GoRoutine(api.Router.Background, "deleteWorkflowHandler", func(ctx context.Context) { - txg, errT := api.mustDB().Begin() - if errT != nil { - log.Error(ctx, "deleteWorkflowHandler> Cannot start transaction: %v", errT) + txg, err := api.mustDB().Begin() + if err != nil { + log.Error(ctx, "deleteWorkflowHandler> Cannot start transaction: %v", err) + return } defer txg.Rollback() // nolint - oldW, err := workflow.Load(ctx, txg, api.Cache, *p, name, workflow.LoadOptions{}) + var dao workflow.WorkflowDAO + dao.Filters.ProjectKey = p.Key + 
dao.Filters.WorkflowName = name + dao.Filters.DisableFilterDeletedWorkflow = true + + oldW, err := dao.Load(ctx, txg) if err != nil { - log.Error(ctx, "deleteWorkflowHandler> unable to load workflow: %v", err) + log.Error(ctx, "deleteWorkflowHandler> unable to load workflow for deletion: %v", err) + return + } + + if err := workflow.CompleteWorkflow(ctx, txg, &oldW, *p, workflow.LoadOptions{}); err != nil { + log.Error(ctx, "deleteWorkflowHandler> unable to load workflow: not found") return } - if err := workflow.Delete(ctx, txg, api.Cache, *p, oldW); err != nil { + if err := workflow.Delete(ctx, txg, api.Cache, *p, &oldW); err != nil { log.Error(ctx, "deleteWorkflowHandler> unable to delete workflow: %v", err) return } if err := txg.Commit(); err != nil { log.Error(ctx, "deleteWorkflowHandler> Cannot commit transaction: %v", err) } - event.PublishWorkflowDelete(ctx, key, *oldW, consumer) + event.PublishWorkflowDelete(ctx, key, oldW, consumer) }, api.PanicDump()) return service.WriteJSON(w, nil, http.StatusOK) @@ -759,3 +767,49 @@ func (api *API) getWorkflowNotificationsConditionsHandler() service.Handler { return service.WriteJSON(w, data, http.StatusOK) } } + +func (api *API) getSearchWorkflowHandler() service.Handler { + return func(ctx context.Context, w http.ResponseWriter, r *http.Request) error { + var dao workflow.WorkflowDAO + dao.Filters.ProjectKey = r.FormValue("project") + dao.Filters.WorkflowName = r.FormValue("name") + dao.Filters.VCSServer = r.FormValue("vcs") + dao.Filters.ApplicationRepository = r.FormValue("repository") + dao.Loaders.WithFavoritesForUserID = getAPIConsumer(ctx).AuthentifiedUserID + + groupIDS := getAPIConsumer(ctx).GetGroupIDs() + dao.Filters.GroupIDs = groupIDS + if isMaintainer(ctx) { + dao.Filters.GroupIDs = nil + } + + ws, err := dao.LoadAll(ctx, api.mustDBWithCtx(ctx)) + if err != nil { + return err + } + + ids := ws.IDs() + perms, err := permission.LoadWorkflowMaxLevelPermissionByWorkflowIDs(ctx, api.mustDB(), ids, groupIDS) + if err != nil { + return err + } + + for i := range ws { + if isAdmin(ctx) { + ws[i].Permissions = sdk.Permissions{Readable: true, Writable: true, Executable: true} + } else { + idString := strconv.FormatInt(ws[i].ID, 10) + ws[i].Permissions = perms.Permissions(idString) + if isMaintainer(ctx) { + ws[i].Permissions.Readable = true + } + } + + w1 := &ws[i] + w1.URLs.APIURL = api.Config.URL.API + api.Router.GetRoute("GET", api.getWorkflowHandler, map[string]string{"key": w1.ProjectKey, "permWorkflowName": w1.Name}) + w1.URLs.UIURL = api.Config.URL.UI + "/project/" + w1.ProjectKey + "/workflow/" + w1.Name + } + + return service.WriteJSON(w, ws, http.StatusOK) + } +} diff --git a/engine/api/workflow/aggregate_test.go b/engine/api/workflow/aggregate_test.go deleted file mode 100644 index f186ec81bb..0000000000 --- a/engine/api/workflow/aggregate_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package workflow_test - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/ovh/cds/engine/api/test" - "github.com/ovh/cds/engine/api/workflow" - "github.com/ovh/cds/sdk" -) - -func TestAggregateOnWorkflowTemplateInstance(t *testing.T) { - db := &test.SqlExecutorMock{} - db.OnSelect = func(i interface{}) { - gs := i.(*[]workflow.Workflow) - *gs = append(*gs, workflow.Workflow{ - ID: 1, - Name: "wkf-1", - }, workflow.Workflow{ - ID: 2, - Name: "wkf-2", - }) - } - - ids := []int64{1, 2} - wtis := []*sdk.WorkflowTemplateInstance{ - {WorkflowID: &ids[0]}, - {WorkflowID: &ids[1]}, - } - - assert.Nil(t, 
workflow.AggregateOnWorkflowTemplateInstance(context.TODO(), db, wtis...)) - - assert.NotNil(t, wtis[0].Workflow) - assert.Equal(t, "wkf-1", wtis[0].Workflow.Name) - assert.NotNil(t, wtis[1].Workflow) - assert.Equal(t, "wkf-2", wtis[1].Workflow.Name) -} diff --git a/engine/api/workflow/dao.go b/engine/api/workflow/dao.go index 998454d595..6dcd15c28e 100644 --- a/engine/api/workflow/dao.go +++ b/engine/api/workflow/dao.go @@ -10,69 +10,43 @@ import ( "strings" "time" - "github.com/ovh/cds/sdk/exportentities" - "github.com/go-gorp/gorp" + "github.com/lib/pq" "github.com/ovh/cds/engine/api/application" - "github.com/ovh/cds/engine/api/ascode" "github.com/ovh/cds/engine/api/cache" - "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/engine/api/environment" "github.com/ovh/cds/engine/api/group" "github.com/ovh/cds/engine/api/integration" "github.com/ovh/cds/engine/api/keys" "github.com/ovh/cds/engine/api/observability" "github.com/ovh/cds/engine/api/pipeline" - "github.com/ovh/cds/engine/api/workflowtemplate" "github.com/ovh/cds/sdk" + "github.com/ovh/cds/sdk/exportentities" "github.com/ovh/cds/sdk/log" ) -func getAll(ctx context.Context, db gorp.SqlExecutor, q gorpmapping.Query) (sdk.Workflows, error) { - res := []Workflow{} - - if err := gorpmapping.GetAll(ctx, db, q, &res); err != nil { - return nil, sdk.WrapError(err, "cannot get workflows") - } +// LoadAllByProjectIDs returns all workflow for given project ids. +func LoadAllNamesByProjectIDs(ctx context.Context, db gorp.SqlExecutor, projectIDs []int64) ([]sdk.WorkflowName, error) { + query := ` + SELECT workflow.*, project.projectkey + FROM workflow + JOIN project ON project.id = workflow.project_id + WHERE project_id = ANY($1)` - ws := make([]sdk.Workflow, 0, len(res)) - for i := range res { - ws = append(ws, sdk.Workflow(res[i])) + var result []sdk.WorkflowName // This struct is not registered as a gorpmapping entity so we can't use gorpmapping.Query + _, err := db.Select(&result, query, pq.Int64Array(projectIDs)) + if err == sql.ErrNoRows { + return result, nil } - return ws, nil -} - -// LoadAllByProjectIDs returns all workflow for given project ids. -func LoadAllByProjectIDs(ctx context.Context, db gorp.SqlExecutor, projectIDs []int64) (sdk.Workflows, error) { - query := gorpmapping.NewQuery(` - SELECT * - FROM workflow - WHERE project_id = ANY(string_to_array($1, ',')::int[]) - `).Args(gorpmapping.IDsToQueryString(projectIDs)) - return getAll(ctx, db, query) + return result, sdk.WithStack(err) } // LoadAllByIDs returns all workflows by ids. 
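A note on the permission lookup used by getWorkflowsHandler and getSearchWorkflowHandler above: because LoadWorkflowMaxLevelPermissionByWorkflowIDs selects workflow.id::text, the resulting sdk.EntitiesPermissions map is keyed by the workflow id rendered as a string, hence the strconv.FormatInt call before Permissions(). A condensed sketch of that flow; the applyPermissions wrapper is illustrative, the called functions and fields appear in the hunks above.

    package example

    import (
        "context"
        "strconv"

        "github.com/go-gorp/gorp"

        "github.com/ovh/cds/engine/api/permission"
        "github.com/ovh/cds/sdk"
    )

    // applyPermissions resolves the max role of the consumer's groups on each
    // workflow and stores it on the workflow, looked up by the id as text.
    func applyPermissions(ctx context.Context, db gorp.SqlExecutor, wfs []sdk.Workflow, groupIDs []int64) error {
        ids := make([]int64, len(wfs))
        for i := range wfs {
            ids[i] = wfs[i].ID
        }
        perms, err := permission.LoadWorkflowMaxLevelPermissionByWorkflowIDs(ctx, db, ids, groupIDs)
        if err != nil {
            return err
        }
        for i := range wfs {
            wfs[i].Permissions = perms.Permissions(strconv.FormatInt(wfs[i].ID, 10))
        }
        return nil
    }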
func LoadAllByIDs(ctx context.Context, db gorp.SqlExecutor, ids []int64) (sdk.Workflows, error) { - query := gorpmapping.NewQuery(` - SELECT * - FROM workflow - WHERE id = ANY(string_to_array($1, ',')::int[]) - `).Args(gorpmapping.IDsToQueryString(ids)) - return getAll(ctx, db, query) -} - -// LoadOptions custom option for loading workflow -type LoadOptions struct { - Minimal bool - DeepPipeline bool - Base64Keys bool - WithLabels bool - WithIcon bool - WithAsCodeUpdateEvent bool - WithIntegrations bool - WithTemplate bool + var dao WorkflowDAO + dao.Filters.WorkflowIDs = ids + return dao.LoadAll(ctx, db) } // UpdateOptions is option to parse a workflow @@ -80,12 +54,6 @@ type UpdateOptions struct { DisableHookManagement bool } -// CountVarInWorkflowData represents the result of CountVariableInWorkflow function -type CountVarInWorkflowData struct { - WorkflowName string `db:"workflow_name"` - NodeName string `db:"node_name"` -} - // Exists checks if a workflow exists func Exists(db gorp.SqlExecutor, key string, name string) (bool, error) { query := ` @@ -102,21 +70,25 @@ func Exists(db gorp.SqlExecutor, key string, name string) (bool, error) { } func LoadByRepo(ctx context.Context, store cache.Store, db gorp.SqlExecutor, proj sdk.Project, repo string, opts LoadOptions) (*sdk.Workflow, error) { - query := ` - SELECT workflow.* - FROM workflow - JOIN project ON project.id = workflow.project_id - WHERE project.projectkey = $1 AND workflow.from_repository = $2 - LIMIT 1 - ` - w, err := load(ctx, db, proj, opts, query, proj.Key, repo) + ctx, end := observability.Span(ctx, "workflow.Load") + defer end() + + dao := opts.GetWorkflowDAO() + dao.Filters.FromRepository = repo + dao.Limit = 1 + + ws, err := dao.Load(ctx, db) if err != nil { return nil, err } - if err := IsValid(ctx, store, db, w, proj, LoadOptions{}); err != nil { - return nil, sdk.WrapError(err, "unable to validate workflow") + + if !opts.Minimal { + if err := CompleteWorkflow(ctx, db, &ws, proj, opts); err != nil { + return nil, err + } } - return w, nil + + return &ws, nil } // UpdateIcon update the icon of a workflow @@ -160,43 +132,20 @@ func (w *Workflow) PostInsert(db gorp.SqlExecutor) error { // PostGet is a db hook func (w *Workflow) PostGet(db gorp.SqlExecutor) error { - var res = struct { - Metadata sql.NullString `db:"metadata"` - PurgeTags sql.NullString `db:"purge_tags"` - WorkflowData sql.NullString `db:"workflow_data"` - }{} - - if err := db.SelectOne(&res, "SELECT metadata, purge_tags, workflow_data FROM workflow WHERE id = $1", w.ID); err != nil { - return sdk.WrapError(err, "PostGet> Unable to load marshalled workflow") - } - - metadata := sdk.Metadata{} - if err := gorpmapping.JSONNullString(res.Metadata, &metadata); err != nil { - return err - } - w.Metadata = metadata + nodes := w.WorkflowData.Array() + nodesID := make([]int64, len(nodes)) - purgeTags := []string{} - if err := gorpmapping.JSONNullString(res.PurgeTags, &purgeTags); err != nil { - return err + for i := range nodes { + nodesID[i] = nodes[i].ID } - w.PurgeTags = purgeTags - data := sdk.WorkflowData{} - if err := gorpmapping.JSONNullString(res.WorkflowData, &data); err != nil { - return sdk.WrapError(err, "Unable to unmarshall workflow data") - } - if data.Node.ID != 0 { - w.WorkflowData = data + mapGroups, err := group.LoadGroupsByNode(db, nodesID) + if err != nil { + return sdk.WrapError(err, "cannot load node groups") } - nodes := w.WorkflowData.Array() for i := range nodes { - var err error - nodes[i].Groups, err = group.LoadGroupsByNode(db, 
nodes[i].ID) - if err != nil { - return sdk.WrapError(err, "cannot load node groups") - } + nodes[i].Groups = mapGroups[nodes[i].ID] } return nil @@ -212,67 +161,35 @@ func (w *Workflow) PreUpdate(db gorp.SqlExecutor) error { fromRepoURL.User = nil w.FromRepository = fromRepoURL.String() } - return nil } // PostUpdate is a db hook func (w *Workflow) PostUpdate(db gorp.SqlExecutor) error { - if err := UpdateMetadata(db, w.ID, w.Metadata); err != nil { - return err - } - - pt, errPt := json.Marshal(w.PurgeTags) - if errPt != nil { - return errPt - } - - data, errD := gorpmapping.JSONToNullString(w.WorkflowData) - if errD != nil { - return sdk.WrapError(errD, "Workflow.PostUpdate> Unable to marshall workflow data") - } - if _, err := db.Exec("update workflow set purge_tags = $1, workflow_data = $3 where id = $2", pt, w.ID, data); err != nil { - return err - } - for _, integ := range w.EventIntegrations { if err := integration.AddOnWorkflow(db, w.ID, integ.ID); err != nil { return sdk.WrapError(err, "cannot add project event integration on workflow") } } - return nil } -// LoadAll loads all workflows for a project. All users in a project can list all workflows in a project -func LoadAll(db gorp.SqlExecutor, projectKey string) (sdk.Workflows, error) { - res := sdk.Workflows{} - dbRes := []Workflow{} - - query := ` - select workflow.* - from workflow - join project on project.id = workflow.project_id - where project.projectkey = $1 - and workflow.to_delete = false - order by workflow.name asc` - - if _, err := db.Select(&dbRes, query, projectKey); err != nil { - if err == sql.ErrNoRows { - return nil, sdk.WithStack(sdk.ErrNotFound) - } - return nil, sdk.WrapError(err, "Unable to load workflows project %s", projectKey) +func (w *Workflow) Get() sdk.Workflow { + wf := w.Workflow + if wf.ProjectKey == "" { + wf.ProjectKey = w.ProjectKey } + return wf +} - for _, w := range dbRes { - w.ProjectKey = projectKey - if err := w.PostGet(db); err != nil { - return nil, sdk.WrapError(err, "Unable to execute post get") - } - res = append(res, sdk.Workflow(w)) +// LoadAll loads all workflows for a project. All users in a project can list all workflows in a project +func LoadAll(db gorp.SqlExecutor, projectKey string) (sdk.Workflows, error) { + dao := WorkflowDAO{ + Filters: LoadAllWorkflowsOptionsFilters{ + ProjectKey: projectKey, + }, } - - return res, nil + return dao.LoadAll(context.Background(), db) } // LoadAllNames loads all workflow names for a project. @@ -293,7 +210,7 @@ func LoadAllNames(db gorp.SqlExecutor, projID int64) (sdk.IDNames, error) { } for i := range res { var err error - res[i].Labels, err = Labels(db, res[i].ID) + res[i].Labels, err = LoadLabels(db, res[i].ID) if err != nil { return res, sdk.WrapError(err, "cannot load labels for workflow %s", res[i].Name) } @@ -304,325 +221,72 @@ func LoadAllNames(db gorp.SqlExecutor, projID int64) (sdk.IDNames, error) { // Load loads a workflow for a given user (ie. 
checking permissions) func Load(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, name string, opts LoadOptions) (*sdk.Workflow, error) { - ctx, end := observability.Span(ctx, "workflow.Load", - observability.Tag(observability.TagWorkflow, name), - observability.Tag(observability.TagProjectKey, proj.Key), - observability.Tag("minimal", opts.Minimal), - observability.Tag("with_pipeline", opts.DeepPipeline), - observability.Tag("with_base64_keys", opts.Base64Keys), - ) + ctx, end := observability.Span(ctx, "workflow.Load") defer end() - var icon string - if !opts.Minimal { - if opts.WithIcon { - icon = "workflow.icon," - } - } else { - // if minimal, reset load options to load only from table workflow - opts = LoadOptions{Minimal: true} - } - - query := fmt.Sprintf(` - select workflow.id, - workflow.project_id, - workflow.name, - workflow.description, - %s - workflow.last_modified, - workflow.root_node_id, - workflow.metadata, - workflow.history_length, - workflow.purge_tags, - workflow.from_repository, - workflow.derived_from_workflow_id, - workflow.derived_from_workflow_name, - workflow.derivation_branch, - workflow.to_delete - from workflow - join project on project.id = workflow.project_id - where project.projectkey = $1 - and workflow.name = $2`, icon) - res, err := load(ctx, db, proj, opts, query, proj.Key, name) + dao := opts.GetWorkflowDAO() + dao.Filters.ProjectKey = proj.Key + dao.Filters.WorkflowName = name + + ws, err := dao.Load(ctx, db) if err != nil { - return nil, sdk.WrapError(err, "Unable to load workflow %s in project %s", name, proj.Key) + return nil, err } - res.ProjectKey = proj.Key if !opts.Minimal { - if err := IsValid(ctx, store, db, res, proj, opts); err != nil { - return nil, sdk.WrapError(err, "Unable to valid workflow") + if err := CompleteWorkflow(ctx, db, &ws, proj, opts); err != nil { + return nil, err } } - return res, nil + return &ws, nil } // LoadAndLockByID loads a workflow func LoadAndLockByID(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, id int64, opts LoadOptions) (*sdk.Workflow, error) { - query := ` - select * - from workflow - where id = $1 for update skip locked` - res, err := load(ctx, db, proj, opts, query, id) - if err != nil { - return nil, sdk.WrapError(err, "Unable to load workflow %d", id) - } - - if err := IsValid(context.TODO(), store, db, res, proj, opts); err != nil { - return nil, sdk.WrapError(err, "Unable to valid workflow") - } - return res, nil -} + dao := opts.GetWorkflowDAO() + dao.Filters.WorkflowIDs = []int64{id} + dao.Lock = true -// LoadByID loads a workflow -func LoadByID(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, id int64, opts LoadOptions) (*sdk.Workflow, error) { - query := ` - select * - from workflow - where id = $1` - res, err := load(ctx, db, proj, opts, query, id) + ws, err := dao.Load(ctx, db) if err != nil { - return nil, sdk.WrapError(err, "Unable to load workflow %d", id) - } - - if err := IsValid(context.TODO(), store, db, res, proj, opts); err != nil { - return nil, sdk.WrapError(err, "Unable to valid workflow") - } - return res, nil -} - -// LoadByPipelineName loads a workflow for a given project key and pipeline name (ie. 
checking permissions) -func LoadByPipelineName(ctx context.Context, db gorp.SqlExecutor, projectKey string, pipName string) (sdk.Workflows, error) { - dbRes := []Workflow{} - query := ` - select distinct workflow.* - from workflow - join project on project.id = workflow.project_id - join w_node on w_node.workflow_id = workflow.id - join w_node_context on w_node_context.node_id = w_node.id - join pipeline on pipeline.id = w_node_context.pipeline_id - where project.projectkey = $1 and pipeline.name = $2 - and workflow.to_delete = false - order by workflow.name asc` - - if _, err := db.Select(&dbRes, query, projectKey, pipName); err != nil { - if err == sql.ErrNoRows { - return sdk.Workflows{}, nil - } - return nil, sdk.WrapError(err, "Unable to load workflows for project %s and pipeline %s", projectKey, pipName) - } - - res := make(sdk.Workflows, len(dbRes)) - for i, w := range dbRes { - w.ProjectKey = projectKey - res[i] = sdk.Workflow(w) - } - - return res, nil -} - -// LoadByApplicationName loads a workflow for a given project key and application name (ie. checking permissions) -func LoadByApplicationName(ctx context.Context, db gorp.SqlExecutor, projectKey string, appName string) (sdk.Workflows, error) { - dbRes := []Workflow{} - query := ` - select distinct workflow.* - from workflow - join project on project.id = workflow.project_id - join w_node on w_node.workflow_id = workflow.id - join w_node_context on w_node_context.node_id = w_node.id - join application on w_node_context.application_id = application.id - where project.projectkey = $1 and application.name = $2 - and workflow.to_delete = false - order by workflow.name asc` - - if _, err := db.Select(&dbRes, query, projectKey, appName); err != nil { - if err == sql.ErrNoRows { - return sdk.Workflows{}, nil - } - return nil, sdk.WrapError(err, "Unable to load workflows for project %s and application %s", projectKey, appName) - } - - res := make(sdk.Workflows, len(dbRes)) - for i, w := range dbRes { - w.ProjectKey = projectKey - res[i] = sdk.Workflow(w) - } - - return res, nil -} - -// LoadByEnvName loads a workflow for a given project key and environment name (ie. 
checking permissions) -func LoadByEnvName(ctx context.Context, db gorp.SqlExecutor, projectKey string, envName string) (sdk.Workflows, error) { - dbRes := []Workflow{} - query := ` - select distinct workflow.* - from workflow - join project on project.id = workflow.project_id - join w_node on w_node.workflow_id = workflow.id - join w_node_context on w_node_context.node_id = w_node.id - join environment on w_node_context.environment_id = environment.id - where project.projectkey = $1 and environment.name = $2 - and workflow.to_delete = false - order by workflow.name asc` - - if _, err := db.Select(&dbRes, query, projectKey, envName); err != nil { - if err == sql.ErrNoRows { - return sdk.Workflows{}, nil - } - return nil, sdk.WrapError(err, "Unable to load workflows for project %s and environment %s", projectKey, envName) - } - - res := make(sdk.Workflows, len(dbRes)) - for i, w := range dbRes { - w.ProjectKey = projectKey - res[i] = sdk.Workflow(w) - } - - return res, nil -} - -func loadByWorkflowTemplateID(ctx context.Context, db gorp.SqlExecutor, query string, args []interface{}) (sdk.Workflows, error) { - var dbRes []Workflow - if _, err := db.Select(&dbRes, query, args...); err != nil { - if err == sql.ErrNoRows { - return nil, nil - } return nil, err } - workflows := make(sdk.Workflows, len(dbRes)) - for i, wf := range dbRes { - var err error - wf.ProjectKey, err = db.SelectStr("SELECT projectkey FROM project WHERE id = $1", wf.ProjectID) - if err != nil { - return nil, sdk.WrapError(err, "cannot load project key for workflow %s and project_id %d", wf.Name, wf.ProjectID) + if !opts.Minimal { + if err := CompleteWorkflow(ctx, db, &ws, proj, opts); err != nil { + return nil, err } - workflows[i] = sdk.Workflow(wf) } - - return workflows, nil -} - -// LoadByWorkflowTemplateID load all workflows linked to a workflow template but without loading workflow details -func LoadByWorkflowTemplateID(ctx context.Context, db gorp.SqlExecutor, templateID int64) (sdk.Workflows, error) { - query := ` - SELECT workflow.* - FROM workflow - JOIN workflow_template_instance ON workflow_template_instance.workflow_id = workflow.id - WHERE workflow_template_instance.workflow_template_id = $1 AND workflow.to_delete = false` - args := []interface{}{templateID} - return loadByWorkflowTemplateID(ctx, db, query, args) + return &ws, nil } -func load(ctx context.Context, db gorp.SqlExecutor, proj sdk.Project, opts LoadOptions, query string, args ...interface{}) (*sdk.Workflow, error) { - t0 := time.Now() - dbRes := Workflow{} - - _, next := observability.Span(ctx, "workflow.load.selectOne") - err := db.SelectOne(&dbRes, query, args...) 
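The load() helper being removed here is superseded by the WorkflowDAO type used throughout this change: callers describe what they want through Filters and Loaders, then call Load for a single workflow or LoadAll for a list. A minimal sketch of that pattern, reusing only DAO fields that appear in this change; the findRepoWorkflows wrapper itself is illustrative.

    package example

    import (
        "context"

        "github.com/go-gorp/gorp"

        "github.com/ovh/cds/engine/api/workflow"
        "github.com/ovh/cds/sdk"
    )

    // findRepoWorkflows returns the workflows of a project whose application
    // repository matches the given one, in a single filtered query.
    func findRepoWorkflows(ctx context.Context, db gorp.SqlExecutor, projectKey, repo string) (sdk.Workflows, error) {
        var dao workflow.WorkflowDAO
        dao.Filters.ProjectKey = projectKey
        dao.Filters.ApplicationRepository = repo
        return dao.LoadAll(ctx, db)
    }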
- next() - if err != nil { - if err == sql.ErrNoRows { - return nil, sdk.WithStack(sdk.ErrNotFound) - } - return nil, sdk.WrapError(err, "Unable to load workflow") - } - - res := sdk.Workflow(dbRes) - if proj.Key == "" { - res.ProjectKey, _ = db.SelectStr("select projectkey from project where id = $1", res.ProjectID) - } else { - res.ProjectKey = proj.Key - } +// LoadByID loads a workflow +func LoadByID(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, id int64, opts LoadOptions) (*sdk.Workflow, error) { + dao := opts.GetWorkflowDAO() + dao.Filters.WorkflowIDs = []int64{id} - // Load groups - _, next = observability.Span(ctx, "workflow.load.loadWorkflowGroups") - gps, err := group.LoadWorkflowGroups(db, res.ID) - next() + ws, err := dao.Load(ctx, db) if err != nil { - return nil, sdk.WrapError(err, "Unable to load workflow groups") - } - res.Groups = gps - - res.Pipelines = map[int64]sdk.Pipeline{} - res.Applications = map[int64]sdk.Application{} - res.Environments = map[int64]sdk.Environment{} - res.HookModels = map[int64]sdk.WorkflowHookModel{} - res.OutGoingHookModels = map[int64]sdk.WorkflowHookModel{} - - if opts.WithLabels { - _, next = observability.Span(ctx, "workflow.load.Labels") - labels, errL := Labels(db, res.ID) - next() - - if errL != nil { - return nil, sdk.WrapError(errL, "Load> unable to load labels") - } - res.Labels = labels - } - - if opts.WithAsCodeUpdateEvent { - _, next = observability.Span(ctx, "workflow.load.AddCodeUpdateEvents") - asCodeEvents, err := ascode.LoadEventsByWorkflowID(ctx, db, res.ID) - next() - if err != nil { - return nil, sdk.WrapError(err, "unable to load as code update events") - } - res.AsCodeEvent = asCodeEvents - } - - if opts.WithIntegrations { - _, next = observability.Span(ctx, "workflow.load.AddIntegrations") - integrations, errInt := integration.LoadIntegrationsByWorkflowID(db, res.ID, false) - next() - - if errInt != nil { - return nil, sdk.WrapError(errInt, "Load> unable to load workflow integrations") - } - res.EventIntegrations = integrations + return nil, err } - if opts.WithTemplate { - wti, err := workflowtemplate.LoadInstanceByWorkflowID(ctx, db, res.ID, workflowtemplate.LoadInstanceOptions.WithTemplate) - if err != nil && !sdk.ErrorIs(err, sdk.ErrNotFound) { + if !opts.Minimal { + if err := CompleteWorkflow(ctx, db, &ws, proj, opts); err != nil { return nil, err } - if wti != nil { - res.TemplateInstance = wti - res.FromTemplate = fmt.Sprintf("%s@%d", wti.Template.Path(), wti.WorkflowTemplateVersion) - res.TemplateUpToDate = wti.Template.Version == wti.WorkflowTemplateVersion - } - } - - _, next = observability.Span(ctx, "workflow.load.loadNotifications") - notifs, errN := loadNotifications(db, &res) - next() - - if errN != nil { - return nil, sdk.WrapError(errN, "Load> Unable to load workflow notification") } - res.Notifications = notifs - - delta := time.Since(t0).Seconds() - - log.Debug("Load> Load workflow (%s/%s)%d took %.3f seconds", res.ProjectKey, res.Name, res.ID, delta) - w := &res - return w, nil -} - -func IsFavorite(db gorp.SqlExecutor, w *sdk.Workflow, uID string) (bool, error) { - count, err := db.SelectInt("SELECT COUNT(1) FROM workflow_favorite WHERE authentified_user_id = $1 AND workflow_id = $2", uID, w.ID) - if err != nil { - return false, sdk.WithStack(err) - } - return count > 0, nil + return &ws, nil } // Insert inserts a new workflow func Insert(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, w *sdk.Workflow) error { - if err := IsValid(ctx, 
store, db, w, proj, LoadOptions{}); err != nil { - return sdk.WrapError(err, "Unable to validate workflow") + if err := CompleteWorkflow(ctx, db, w, proj, LoadOptions{}); err != nil { + return err + } + + if err := CheckValidity(ctx, db, w); err != nil { + return err } if w.WorkflowData.Node.Context != nil && w.WorkflowData.Node.Context.ApplicationID != 0 { @@ -637,11 +301,16 @@ func Insert(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sd } w.LastModified = time.Now() - if err := db.QueryRow("INSERT INTO workflow (name, description, icon, project_id, history_length, from_repository) VALUES ($1, $2, $3, $4, $5, $6) RETURNING id", w.Name, w.Description, w.Icon, w.ProjectID, w.HistoryLength, w.FromRepository).Scan(&w.ID); err != nil { + if err := db.QueryRow(`INSERT INTO workflow ( + name, description, icon, project_id, history_length, from_repository, purge_tags, workflow_data, metadata + ) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) + RETURNING id`, + w.Name, w.Description, w.Icon, w.ProjectID, w.HistoryLength, w.FromRepository, w.PurgeTags, w.WorkflowData, w.Metadata).Scan(&w.ID); err != nil { return sdk.WrapError(err, "Unable to insert workflow %s/%s", w.ProjectKey, w.Name) } - dbw := Workflow(*w) + dbw := Workflow{Workflow: *w} if err := dbw.PostInsert(db); err != nil { return sdk.WrapError(err, "Cannot post insert hook") } @@ -734,7 +403,7 @@ func Insert(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sd } } - dbWorkflow := Workflow(*w) + dbWorkflow := Workflow{Workflow: *w} if err := dbWorkflow.PostUpdate(db); err != nil { return sdk.WrapError(err, "Insert> Unable to create workflow data") } @@ -917,9 +586,11 @@ func RenameNode(ctx context.Context, db gorp.SqlExecutor, w *sdk.Workflow) error // Update updates a workflow func Update(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, wf *sdk.Workflow, uptOption UpdateOptions) error { - ctx, end := observability.Span(ctx, "workflow.Update") - defer end() - if err := IsValid(ctx, store, db, wf, proj, LoadOptions{}); err != nil { + if err := CompleteWorkflow(ctx, db, wf, proj, LoadOptions{}); err != nil { + return err + } + + if err := CheckValidity(ctx, db, wf); err != nil { return err } @@ -982,11 +653,11 @@ func Update(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sd } wf.LastModified = time.Now() - dbw := Workflow(*wf) - if _, err := db.Update(&dbw); err != nil { + dbw := Workflow{Workflow: *wf} + if _, err := db.UpdateColumns(func(c *gorp.ColumnMap) bool { return c.ColumnName != "project_key" }, &dbw); err != nil { return sdk.WrapError(err, "Unable to update workflow") } - *wf = sdk.Workflow(dbw) + *wf = dbw.Get() return nil } @@ -1026,7 +697,7 @@ func Delete(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sd } //Delete workflow - dbw := Workflow(*w) + dbw := Workflow{Workflow: *w} if _, err := db.Delete(&dbw); err != nil { return sdk.WrapError(err, "unable to delete workflow") } @@ -1034,8 +705,54 @@ func Delete(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sd return nil } -// IsValid cheks workflow validity -func IsValid(ctx context.Context, store cache.Store, db gorp.SqlExecutor, w *sdk.Workflow, proj sdk.Project, opts LoadOptions) error { +func CompleteWorkflow(ctx context.Context, db gorp.SqlExecutor, w *sdk.Workflow, proj sdk.Project, opts LoadOptions) error { + + w.InitMaps() + w.AssignEmptyType() + + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := nodesArray[i] + if n.Context == 
nil { + continue + } + + if err := checkPipeline(ctx, db, proj, w, n, opts); err != nil { + return err + } + if err := checkApplication(db, proj, w, n); err != nil { + return err + } + if err := checkEnvironment(db, proj, w, n); err != nil { + return err + } + if err := checkProjectIntegration(proj, w, n); err != nil { + return err + } + if err := checkEventIntegration(proj, w); err != nil { + return err + } + if err := checkHooks(db, w, n); err != nil { + return err + } + if err := checkOutGoingHook(db, w, n); err != nil { + return err + } + + if n.Context.ApplicationID != 0 && n.Context.ProjectIntegrationID != 0 { + if err := n.CheckApplicationDeploymentStrategies(proj, w); err != nil { + return sdk.NewError(sdk.ErrWorkflowInvalid, err) + } + } + } + + w.Normalize() + + return nil +} + +// CheckValidity checks workflow validity +func CheckValidity(ctx context.Context, db gorp.SqlExecutor, w *sdk.Workflow) error { //Check project is not empty if w.ProjectKey == "" { return sdk.NewError(sdk.ErrWorkflowInvalid, fmt.Errorf("Invalid project key")) @@ -1063,25 +780,6 @@ func IsValid(ctx context.Context, store cache.Store, db gorp.SqlExecutor, w *sdk } } - if w.Pipelines == nil { - w.Pipelines = make(map[int64]sdk.Pipeline) - } - if w.Applications == nil { - w.Applications = make(map[int64]sdk.Application) - } - if w.Environments == nil { - w.Environments = make(map[int64]sdk.Environment) - } - if w.ProjectIntegrations == nil { - w.ProjectIntegrations = make(map[int64]sdk.ProjectIntegration) - } - if w.HookModels == nil { - w.HookModels = make(map[int64]sdk.WorkflowHookModel) - } - if w.OutGoingHookModels == nil { - w.OutGoingHookModels = make(map[int64]sdk.WorkflowHookModel) - } - if w.WorkflowData.Node.Context != nil && w.WorkflowData.Node.Context.DefaultPayload != nil { defaultPayload, err := w.WorkflowData.Node.Context.DefaultPayloadToMap() if err != nil { @@ -1095,47 +793,10 @@ func IsValid(ctx context.Context, store cache.Store, db gorp.SqlExecutor, w *sdk } // Fill empty node type - w.AssignEmptyType() if err := w.ValidateType(); err != nil { return err } - nodesArray := w.WorkflowData.Array() - for i := range nodesArray { - n := nodesArray[i] - if n.Context == nil { - continue - } - - if err := checkPipeline(ctx, db, proj, w, n, opts); err != nil { - return err - } - if err := checkApplication(store, db, proj, w, n); err != nil { - return err - } - if err := checkEnvironment(db, proj, w, n); err != nil { - return err - } - if err := checkProjectIntegration(proj, w, n); err != nil { - return err - } - if err := checkEventIntegration(proj, w); err != nil { - return err - } - if err := checkHooks(db, w, n); err != nil { - return err - } - if err := checkOutGoingHook(db, w, n); err != nil { - return err - } - - if n.Context.ApplicationID != 0 && n.Context.ProjectIntegrationID != 0 { - if err := n.CheckApplicationDeploymentStrategies(proj, w); err != nil { - return sdk.NewError(sdk.ErrWorkflowInvalid, err) - } - } - } - return nil } @@ -1181,7 +842,6 @@ func checkHooks(db gorp.SqlExecutor, w *sdk.Workflow, n *sdk.Node) error { } w.HookModels[h.HookModelID] = *hmDB } - h.HookModelName = w.HookModels[h.HookModelID].Name } else { hm, err := LoadHookModelByName(db, h.HookModelName) if err != nil { @@ -1252,7 +912,6 @@ func checkProjectIntegration(proj sdk.Project, w *sdk.Workflow, n *sdk.Node) err } w.ProjectIntegrations[n.Context.ProjectIntegrationID] = pp } - n.Context.ProjectIntegrationName = pp.Name return nil } if n.Context.ProjectIntegrationName != "" { @@ -1309,7 +968,6 @@ func 
checkEnvironment(db gorp.SqlExecutor, proj sdk.Project, w *sdk.Workflow, n w.Environments[n.Context.EnvironmentID] = env } - n.Context.EnvironmentName = env.Name return nil } if n.Context.EnvironmentName != "" { @@ -1324,7 +982,7 @@ func checkEnvironment(db gorp.SqlExecutor, proj sdk.Project, w *sdk.Workflow, n } // CheckApplication checks application data -func checkApplication(store cache.Store, db gorp.SqlExecutor, proj sdk.Project, w *sdk.Workflow, n *sdk.Node) error { +func checkApplication(db gorp.SqlExecutor, proj sdk.Project, w *sdk.Workflow, n *sdk.Node) error { if n.Context.ApplicationID != 0 { app, ok := w.Applications[n.Context.ApplicationID] if !ok { @@ -1339,7 +997,6 @@ func checkApplication(store cache.Store, db gorp.SqlExecutor, proj sdk.Project, w.Applications[n.Context.ApplicationID] = app } - n.Context.ApplicationName = app.Name return nil } if n.Context.ApplicationName != "" { @@ -1374,7 +1031,6 @@ func checkPipeline(ctx context.Context, db gorp.SqlExecutor, proj sdk.Project, w w.Pipelines[n.Context.PipelineID] = pip } - n.Context.PipelineName = pip.Name return nil } if n.Context.PipelineName != "" { diff --git a/engine/api/workflow/dao_data.go b/engine/api/workflow/dao_data.go index ad0a2868da..c5115ae085 100644 --- a/engine/api/workflow/dao_data.go +++ b/engine/api/workflow/dao_data.go @@ -67,8 +67,8 @@ func InsertWorkflowData(db gorp.SqlExecutor, w *sdk.Workflow) error { } } - dbWorkflow := Workflow(*w) - if err := dbWorkflow.PostUpdate(db); err != nil { + dbWorkflow := Workflow{Workflow: *w} + if _, err := db.UpdateColumns(func(c *gorp.ColumnMap) bool { return c.ColumnName != "project_key" }, &dbWorkflow); err != nil { return sdk.WrapError(err, "InsertWorkflowData> unable to update workflow data") } diff --git a/engine/api/workflow/dao_label.go b/engine/api/workflow/dao_label.go index 0fdb7b74d2..a0bc43a1b3 100644 --- a/engine/api/workflow/dao_label.go +++ b/engine/api/workflow/dao_label.go @@ -8,10 +8,12 @@ import ( "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/sdk" + "github.com/ovh/cds/sdk/log" ) // LabelWorkflow link a label on a workflow given his workflow id func LabelWorkflow(db gorp.SqlExecutor, labelID, workflowID int64) error { + log.Debug("LabelWorkflow> %d %d", labelID, workflowID) if _, err := db.Exec("INSERT INTO project_label_workflow (label_id, workflow_id) VALUES ($1, $2)", labelID, workflowID); err != nil { if errPG, ok := err.(*pq.Error); ok && errPG.Code == gorpmapping.ViolateUniqueKeyPGCode { return sdk.WrapError(sdk.ErrConflict, "LabelWorkflow> this label %d is already linked to workflow %d", labelID, workflowID) @@ -31,24 +33,32 @@ func UnLabelWorkflow(db gorp.SqlExecutor, labelID, workflowID int64) error { return nil } -// Labels return list of labels given a workflow ID -func Labels(db gorp.SqlExecutor, workflowID int64) ([]sdk.Label, error) { - var labels []sdk.Label +type dbLabel struct { + sdk.Label + WorkflowID int64 `db:"workflow_id"` +} + +// LoadLabels return list of labels given a workflow ID +func LoadLabels(db gorp.SqlExecutor, workflowIDs ...int64) ([]sdk.Label, error) { + var labels []dbLabel query := ` - SELECT project_label.* - FROM project_label - JOIN project_label_workflow ON project_label.id = project_label_workflow.label_id - WHERE project_label_workflow.workflow_id = $1 - ` - if _, err := db.Select(&labels, query, workflowID); err != nil { + SELECT project_label.*, project_label_workflow.workflow_id + FROM project_label + JOIN project_label_workflow ON project_label.id = 
project_label_workflow.label_id + WHERE project_label_workflow.workflow_id = ANY($1)` + + if _, err := db.Select(&labels, query, pq.Int64Array(workflowIDs)); err != nil { if err == sql.ErrNoRows { - return labels, nil + return nil, nil } - return labels, sdk.WrapError(err, "Cannot load labels") + return nil, sdk.WrapError(err, "Cannot load labels") } + + var result = make([]sdk.Label, 0, len(labels)) for i := range labels { - labels[i].WorkflowID = workflowID + labels[i].Label.WorkflowID = labels[i].WorkflowID + result = append(result, labels[i].Label) } - return labels, nil + return result, nil } diff --git a/engine/api/workflow/dao_notification.go b/engine/api/workflow/dao_notification.go index 065a1cc2d9..d0c293a7a4 100644 --- a/engine/api/workflow/dao_notification.go +++ b/engine/api/workflow/dao_notification.go @@ -4,6 +4,7 @@ import ( "database/sql" "github.com/go-gorp/gorp" + "github.com/lib/pq" "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/sdk" ) @@ -16,59 +17,55 @@ func DeleteNotifications(db gorp.SqlExecutor, workflowID int64) error { return nil } -func loadNotifications(db gorp.SqlExecutor, w *sdk.Workflow) ([]sdk.WorkflowNotification, error) { - notifIDs := []int64{} - _, err := db.Select(¬ifIDs, "select id from workflow_notification where workflow_id = $1", w.ID) - if err != nil { +func LoadNotificationsByWorkflowIDs(db gorp.SqlExecutor, ids []int64) (map[int64][]sdk.WorkflowNotification, error) { + query := ` + SELECT + workflow_notification.*, + array_remove(array_agg(workflow_notification_source.node_id::text), NULL) "node_ids" + FROM workflow_notification + LEFT OUTER JOIN workflow_notification_source ON workflow_notification_source.workflow_notification_id = workflow_notification.id + WHERE workflow_notification.workflow_id = ANY($1) + GROUP BY workflow_notification.workflow_id, workflow_notification.id + ORDER BY workflow_notification.workflow_id` + + var dbNotifs = []struct { + ID int64 `db:"id"` + WorkflowID int64 `db:"workflow_id"` + NodeIDs pq.Int64Array `db:"node_ids"` + Type string `db:"type"` + Settings sdk.UserNotificationSettings `db:"settings"` + }{} + + if _, err := db.Select(&dbNotifs, query, pq.Int64Array(ids)); err != nil { if err == sql.ErrNoRows { return nil, nil } - return nil, sdk.WrapError(err, "Unable to load notification IDs on workflow %d", w.ID) - } - - notifications := make([]sdk.WorkflowNotification, len(notifIDs)) - for index, id := range notifIDs { - n, errJ := loadNotification(db, w, id) - if errJ != nil { - return nil, sdk.WrapError(errJ, "loadNotification> Unable to load notification %d on workflow %d", id, w.ID) - } - notifications[index] = n - } - - return notifications, nil -} - -func loadNotification(db gorp.SqlExecutor, w *sdk.Workflow, id int64) (sdk.WorkflowNotification, error) { - dbnotif := Notification{} - //Load the notification - if err := db.SelectOne(&dbnotif, "select * from workflow_notification where id = $1", id); err != nil { - return sdk.WorkflowNotification{}, sdk.WrapError(err, "Unable to load notification %d", id) + return nil, sdk.WithStack(err) } - dbnotif.WorkflowID = w.ID - //Load sources - var ids []int64 - if _, err := db.Select(&ids, "select node_id from workflow_notification_source where workflow_notification_id = $1", id); err != nil { - return sdk.WorkflowNotification{}, sdk.WrapError(err, "Unable to load notification %d sources", id) - } - dbnotif.NodeIDs = ids - n := sdk.WorkflowNotification(dbnotif) + mapNotifs := make(map[int64][]sdk.WorkflowNotification) - for _, id := range 
n.NodeIDs { - notifNode := w.WorkflowData.NodeByID(id) - if notifNode != nil { - n.SourceNodeRefs = append(n.SourceNodeRefs, notifNode.Name) + for _, n := range dbNotifs { + arrayNotif := mapNotifs[n.WorkflowID] + notif := sdk.WorkflowNotification{ + ID: n.ID, + Settings: n.Settings, + NodeIDs: n.NodeIDs, + Type: n.Type, + WorkflowID: n.WorkflowID, } + // Need the node_name for references... + arrayNotif = append(arrayNotif, notif) + mapNotifs[n.WorkflowID] = arrayNotif } - return n, nil + return mapNotifs, nil } func InsertNotification(db gorp.SqlExecutor, w *sdk.Workflow, n *sdk.WorkflowNotification) error { n.WorkflowID = w.ID n.ID = 0 n.NodeIDs = nil - dbNotif := Notification(*n) for _, s := range n.SourceNodeRefs { nodeFoundRef := w.WorkflowData.NodeByName(s) @@ -78,6 +75,8 @@ func InsertNotification(db gorp.SqlExecutor, w *sdk.Workflow, n *sdk.WorkflowNot n.NodeIDs = append(n.NodeIDs, nodeFoundRef.ID) } + dbNotif := Notification(*n) + //Insert the notification if err := db.Insert(&dbNotif); err != nil { return sdk.WrapError(err, "Unable to insert workflow notification") diff --git a/engine/api/workflow/dao_test.go b/engine/api/workflow/dao_test.go index b1f3165fda..5cce40ea2a 100644 --- a/engine/api/workflow/dao_test.go +++ b/engine/api/workflow/dao_test.go @@ -202,6 +202,8 @@ func TestInsertSimpleWorkflowWithApplicationAndEnv(t *testing.T) { test.NoError(t, err) assert.Equal(t, w.ID, w1.ID) + require.NotNil(t, w.ID, w1.WorkflowData) + require.NotEqual(t, 0, w1.WorkflowData.Node.ID) assert.Equal(t, w.WorkflowData.Node.Context.ApplicationID, w1.WorkflowData.Node.Context.ApplicationID) assert.Equal(t, w.WorkflowData.Node.Context.EnvironmentID, w1.WorkflowData.Node.Context.EnvironmentID) assert.Equal(t, w.WorkflowData.Node.Context.Mutex, w1.WorkflowData.Node.Context.Mutex) diff --git a/engine/api/workflow/dao_workflow_favorite.go b/engine/api/workflow/dao_workflow_favorite.go new file mode 100644 index 0000000000..c7cb4d5256 --- /dev/null +++ b/engine/api/workflow/dao_workflow_favorite.go @@ -0,0 +1,24 @@ +package workflow + +import ( + "github.com/go-gorp/gorp" + + "github.com/ovh/cds/sdk" +) + +func IsFavorite(db gorp.SqlExecutor, w *sdk.Workflow, uID string) (bool, error) { + count, err := db.SelectInt("SELECT COUNT(1) FROM workflow_favorite WHERE authentified_user_id = $1 AND workflow_id = $2", uID, w.ID) + if err != nil { + return false, sdk.WithStack(err) + } + return count > 0, nil +} + +// UserFavoriteWorkflowIDs returns the list of workflow ID +func UserFavoriteWorkflowIDs(db gorp.SqlExecutor, uID string) ([]int64, error) { + var result []int64 + if _, err := db.Select(&result, "SELECT workflow_id FROM workflow_favorite WHERE authentified_user_id = $1", uID); err != nil { + return nil, sdk.WithStack(err) + } + return result, nil +} diff --git a/engine/api/workflow/aggregate.go b/engine/api/workflow/dao_workflow_template_instance.go similarity index 100% rename from engine/api/workflow/aggregate.go rename to engine/api/workflow/dao_workflow_template_instance.go diff --git a/engine/api/workflow/dao_workflow_usage.go b/engine/api/workflow/dao_workflow_usage.go new file mode 100644 index 0000000000..8d530db726 --- /dev/null +++ b/engine/api/workflow/dao_workflow_usage.go @@ -0,0 +1,69 @@ +package workflow + +import ( + "context" + + "github.com/go-gorp/gorp" + + "github.com/ovh/cds/sdk" +) + +// LoadByPipelineName loads a workflow for a given project key and pipeline name +func LoadByPipelineName(ctx context.Context, db gorp.SqlExecutor, projectKey string, pipName string) 
([]sdk.WorkflowName, error) { + query := `SELECT distinct workflow.*, project.projectkey as "project_key", project.id as "project_id" + from workflow + join project on project.id = workflow.project_id + join w_node on w_node.workflow_id = workflow.id + join w_node_context on w_node_context.node_id = w_node.id + join pipeline on pipeline.id = w_node_context.pipeline_id + where project.projectkey = $1 and pipeline.name = $2 + and workflow.to_delete = false + order by workflow.name asc` + var result []sdk.WorkflowName // This struct is not registered as a gorpmapping entity so we can't use gorpmapping.Query + _, err := db.Select(&result, query, projectKey, pipName) + return result, sdk.WithStack(err) +} + +// LoadByApplicationName loads a workflow for a given project key and application name +func LoadByApplicationName(ctx context.Context, db gorp.SqlExecutor, projectKey string, appName string) ([]sdk.WorkflowName, error) { + query := `SELECT distinct workflow.*, project.projectkey as "project_key", project.id as "project_id" + from workflow + join project on project.id = workflow.project_id + join w_node on w_node.workflow_id = workflow.id + join w_node_context on w_node_context.node_id = w_node.id + join application on w_node_context.application_id = application.id + where project.projectkey = $1 and application.name = $2 + and workflow.to_delete = false + order by workflow.name asc` + var result []sdk.WorkflowName // This struct is not registered as a gorpmapping entity so we can't use gorpmapping.Query + _, err := db.Select(&result, query, projectKey, appName) + return result, sdk.WithStack(err) +} + +// LoadByEnvName loads a workflow for a given project key and environment name (ie. checking permissions) +func LoadByEnvName(ctx context.Context, db gorp.SqlExecutor, projectKey string, envName string) ([]sdk.WorkflowName, error) { + query := `SELECT distinct workflow.*, project.projectkey as "project_key", project.id as "project_id" + from workflow + join project on project.id = workflow.project_id + join w_node on w_node.workflow_id = workflow.id + join w_node_context on w_node_context.node_id = w_node.id + join environment on w_node_context.environment_id = environment.id + where project.projectkey = $1 and environment.name = $2 + and workflow.to_delete = false + order by workflow.name asc` + var result []sdk.WorkflowName // This struct is not registered as a gorpmapping entity so we can't use gorpmapping.Query + _, err := db.Select(&result, query, projectKey, envName) + return result, sdk.WithStack(err) +} + +// LoadByWorkflowTemplateID load all workflow names linked to a workflow template +func LoadByWorkflowTemplateID(ctx context.Context, db gorp.SqlExecutor, templateID int64) ([]sdk.WorkflowName, error) { + query := `SELECT distinct workflow.*, project.projectkey as "project_key", project.id as "project_id" + FROM workflow + JOIN workflow_template_instance ON workflow_template_instance.workflow_id = workflow.id + JOIN project on project.id = workflow.project_id + WHERE workflow_template_instance.workflow_template_id = $1 AND workflow.to_delete = false` + var result []sdk.WorkflowName // This struct is not registered as a gorpmapping entity so we can't use gorpmapping.Query + _, err := db.Select(&result, query, templateID) + return result, sdk.WithStack(err) +} diff --git a/engine/api/workflow/factory_dao.go b/engine/api/workflow/factory_dao.go new file mode 100644 index 0000000000..615cd435ac --- /dev/null +++ b/engine/api/workflow/factory_dao.go @@ -0,0 +1,678 @@ +package workflow + 
+import ( + "context" + "fmt" + "time" + + "github.com/go-gorp/gorp" + "github.com/lib/pq" + + "github.com/ovh/cds/engine/api/application" + "github.com/ovh/cds/engine/api/ascode" + "github.com/ovh/cds/engine/api/database/gorpmapping" + "github.com/ovh/cds/engine/api/environment" + "github.com/ovh/cds/engine/api/group" + "github.com/ovh/cds/engine/api/integration" + "github.com/ovh/cds/engine/api/pipeline" + "github.com/ovh/cds/engine/api/workflowtemplate" + "github.com/ovh/cds/sdk" + "github.com/ovh/cds/sdk/log" +) + +// LoadOptions custom option for loading workflow +type LoadOptions struct { + Minimal bool + DeepPipeline bool + WithLabels bool + WithIcon bool + WithAsCodeUpdateEvent bool + WithIntegrations bool + WithTemplate bool + WithFavoritesForUserID string +} + +func (loadOpts LoadOptions) GetWorkflowDAO() WorkflowDAO { + var dao WorkflowDAO + + if !loadOpts.Minimal { + dao.Loaders.WithPipelines = true + dao.Loaders.WithApplications = true + dao.Loaders.WithEnvironments = true + dao.Loaders.WithIntegrations = true + dao.Loaders.WithFavoritesForUserID = loadOpts.WithFavoritesForUserID + + if loadOpts.WithIcon { + dao.Loaders.WithIcon = true + } + if loadOpts.WithAsCodeUpdateEvent { + dao.Loaders.WithAsCodeUpdateEvents = true + } + if loadOpts.WithTemplate { + dao.Loaders.WithTemplate = true + } + if loadOpts.DeepPipeline { + dao.Loaders.WithDeepPipelines = true + } + if loadOpts.WithLabels { + dao.Loaders.WithLabels = true + } + } + return dao +} + +type LoadAllWorkflowsOptionsFilters struct { + ProjectKey string + WorkflowName string + VCSServer string + ApplicationRepository string + FromRepository string + GroupIDs []int64 + WorkflowIDs []int64 + DisableFilterDeletedWorkflow bool +} + +type LoadAllWorkflowsOptionsLoaders struct { + WithApplications bool + WithPipelines bool + WithDeepPipelines bool + WithEnvironments bool + WithIntegrations bool + WithIcon bool + WithAsCodeUpdateEvents bool + WithTemplate bool + WithLabels bool + WithAudits bool + WithFavoritesForUserID string +} + +type WorkflowDAO struct { + Filters LoadAllWorkflowsOptionsFilters + Loaders LoadAllWorkflowsOptionsLoaders + Offset int + Limit int + Ascending bool + Lock bool +} + +func (dao WorkflowDAO) Query() gorpmapping.Query { + var queryString = ` + WITH + workflow_root_application_id AS ( + SELECT + id as "workflow_id", + project_id, + name as "workflow_name", + (workflow_data -> 'node' -> 'context' ->> 'application_id')::BIGINT as "root_application_id" + FROM workflow + ), + project_permission AS ( + SELECT + project_id, + ARRAY_AGG(group_id) as "groups" + FROM project_group + GROUP BY project_id + ), + selected_workflow AS ( + SELECT + project.id, + workflow_root_application_id.workflow_id, + project.projectkey, + workflow_name, + application.id, + application.name, + application.vcs_server, + application.repo_fullname, + project_permission.groups + FROM workflow_root_application_id + LEFT OUTER JOIN application ON application.id = root_application_id + JOIN project ON project.id = workflow_root_application_id.project_id + JOIN project_permission ON project_permission.project_id = project.id + ) + SELECT workflow.* , selected_workflow.projectkey as "project_key" + FROM workflow + JOIN selected_workflow ON selected_workflow.workflow_id = workflow.id + JOIN project_permission ON project_permission.project_id = workflow.project_id + ` + + var filters []string + var args []interface{} + if dao.Filters.ProjectKey != "" { + filters = append(filters, "selected_workflow.projectkey = $%d") + args = append(args, 
dao.Filters.ProjectKey) + } + if dao.Filters.WorkflowName != "" { + filters = append(filters, "selected_workflow.workflow_name = $%d") + args = append(args, dao.Filters.WorkflowName) + } + if dao.Filters.VCSServer != "" { + filters = append(filters, "selected_workflow.vcs_server = $%d") + args = append(args, dao.Filters.VCSServer) + } + if dao.Filters.ApplicationRepository != "" { + filters = append(filters, "selected_workflow.repo_fullname = $%d") + args = append(args, dao.Filters.ApplicationRepository) + } + if dao.Filters.FromRepository != "" { + filters = append(filters, "workflow.from_repository = $%d") + args = append(args, dao.Filters.FromRepository) + } + if len(dao.Filters.GroupIDs) != 0 { + filters = append(filters, "selected_workflow.groups && $%d") + args = append(args, pq.Int64Array(dao.Filters.GroupIDs)) + } + if len(dao.Filters.WorkflowIDs) != 0 { + filters = append(filters, "workflow.id = ANY($%d)") + args = append(args, pq.Int64Array(dao.Filters.WorkflowIDs)) + } + + for i, f := range filters { + if i == 0 { + queryString += " WHERE " + } else { + queryString += " AND " + } + queryString += fmt.Sprintf(f, i+1) + } + + if !dao.Filters.DisableFilterDeletedWorkflow { + queryString += " AND workflow.to_delete = false" + } + + var order = " ORDER BY selected_workflow.projectkey, selected_workflow.workflow_name " + if dao.Ascending { + order += "ASC" + } else { + order += "DESC" + } + queryString += order + + if dao.Offset != 0 { + queryString += fmt.Sprintf(" OFFSET %d", dao.Offset) + } + + if dao.Limit != 0 { + queryString += fmt.Sprintf(" LIMIT %d", dao.Limit) + } + + if dao.Lock { + queryString += " for update skip locked" + } + + q := gorpmapping.NewQuery(queryString).Args(args...) + + log.Debug("workflow.WorkflowDAO.Query> %v", q) + + return q +} + +func (dao WorkflowDAO) GetLoaders() []gorpmapping.GetOptionFunc { + + var loaders = []gorpmapping.GetOptionFunc{} + + if dao.Loaders.WithApplications { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withApplications(db, ws) + }) + } + + if dao.Loaders.WithEnvironments { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withEnvironments(db, ws) + }) + } + + if dao.Loaders.WithDeepPipelines { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withPipelines(db, ws, true) + }) + } else if dao.Loaders.WithPipelines { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withPipelines(db, ws, false) + }) + } + + if dao.Loaders.WithAsCodeUpdateEvents { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withAsCodeUpdateEvents(db, ws) + }) + } + + if !dao.Loaders.WithIcon { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + for j := range *ws { + // strip the icon on the slice element itself; assigning through a local copy would not persist + (*ws)[j].Icon = "" + } + return nil + }) + } + + if dao.Loaders.WithIntegrations { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withIntegrations(db, ws) + }) + } + + if dao.Loaders.WithTemplate { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withTemplates(db, ws) + }) + } + + if dao.Loaders.WithLabels { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws :=
i.(*[]Workflow) + return dao.withLabels(db, ws) + }) + } + + if dao.Loaders.WithFavoritesForUserID != "" { + loaders = append(loaders, func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withFavorites(db, ws, dao.Loaders.WithFavoritesForUserID) + }) + } + + loaders = append(loaders, + func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withGroups(db, ws) + }, + func(db gorp.SqlExecutor, i interface{}) error { + ws := i.(*[]Workflow) + return dao.withNotifications(db, ws) + }) + + return loaders +} + +func (dao WorkflowDAO) withGroups(db gorp.SqlExecutor, ws *[]Workflow) error { + var mapIDs = map[int64]*Workflow{} + for _, w := range *ws { + mapIDs[w.ID] = &w + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + perms, err := group.LoadWorkflowGroupsByWorkflowIDs(db, ids) + if err != nil { + return err + } + + for workflowID, perm := range perms { + for i, w := range *ws { + if w.ID == workflowID { + w.Groups = perm + (*ws)[i] = w + break + } + } + } + + return nil +} + +func (dao WorkflowDAO) withEnvironments(db gorp.SqlExecutor, ws *[]Workflow) error { + var mapIDs = map[int64]*sdk.Environment{} + for _, w := range *ws { + nodesArray := w.WorkflowData.Array() + for _, n := range nodesArray { + if n.Context != nil && n.Context.EnvironmentID != 0 { + if _, ok := mapIDs[n.Context.EnvironmentID]; !ok { + mapIDs[n.Context.EnvironmentID] = nil + } + } + } + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + envs, err := environment.LoadAllByIDs(db, ids) + if err != nil { + return err + } + + for id := range mapIDs { + for i := range envs { + if id == envs[i].ID { + mapIDs[id] = &envs[i] + } + } + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := nodesArray[i] + if n.Context != nil && n.Context.EnvironmentID != 0 { + if env, ok := mapIDs[n.Context.EnvironmentID]; ok { + if env == nil { + return sdk.WrapError(sdk.ErrNotFound, "unable to find environment %d", n.Context.EnvironmentID) + } + w.Environments[n.Context.EnvironmentID] = *env + } + } + } + } + + return nil +} + +func (dao WorkflowDAO) withPipelines(db gorp.SqlExecutor, ws *[]Workflow, deep bool) error { + var mapIDs = map[int64]*sdk.Pipeline{} + for _, w := range *ws { + nodesArray := w.WorkflowData.Array() + for _, n := range nodesArray { + if n.Context != nil && n.Context.PipelineID != 0 { + if _, ok := mapIDs[n.Context.PipelineID]; !ok { + mapIDs[n.Context.PipelineID] = nil + } + } + } + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + pips, err := pipeline.LoadAllByIDs(db, ids, deep) + if err != nil { + return err + } + + for id := range mapIDs { + for i := range pips { + if id == pips[i].ID { + mapIDs[id] = &pips[i] + } + } + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := nodesArray[i] + if n.Context != nil && n.Context.PipelineID != 0 { + if pip, ok := mapIDs[n.Context.PipelineID]; ok { + if pip == nil { + return sdk.WrapError(sdk.ErrNotFound, "unable to find pipeline %d", n.Context.PipelineID) + } + w.Pipelines[n.Context.PipelineID] = *pip + } + } + } + } + + return nil +} + +func (dao WorkflowDAO) withTemplates(db gorp.SqlExecutor, ws *[]Workflow) error { + var mapIDs = map[int64]struct{}{} + for _, w := range *ws { + mapIDs[w.ID] = 
struct{}{} + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + wtis, err := workflowtemplate.LoadInstanceByWorkflowIDs(context.Background(), db, ids, workflowtemplate.LoadInstanceOptions.WithTemplate) + if err != nil { + return err + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + for _, wti := range wtis { + if wti.WorkflowID != nil && w.ID == *wti.WorkflowID { + w.TemplateInstance = &wti + w.FromTemplate = fmt.Sprintf("%s@%d", wti.Template.Path(), wti.WorkflowTemplateVersion) + w.TemplateUpToDate = wti.Template.Version == wti.WorkflowTemplateVersion + break + } + } + } + + return nil +} + +func (dao WorkflowDAO) withIntegrations(db gorp.SqlExecutor, ws *[]Workflow) error { + var mapIDs = map[int64]*sdk.ProjectIntegration{} + for _, w := range *ws { + nodesArray := w.WorkflowData.Array() + for _, n := range nodesArray { + if n.Context != nil && n.Context.ProjectIntegrationID != 0 { + log.Debug("found ProjectIntegrationID=%d(%s) on workflow %d, node=%d", n.Context.ProjectIntegrationID, n.Context.ProjectIntegrationName, w.ID, n.ID) + if _, ok := mapIDs[n.Context.ProjectIntegrationID]; !ok { + mapIDs[n.Context.ProjectIntegrationID] = nil + } + } + } + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + projectIntegrations, err := integration.LoadIntegrationsByIDs(db, ids) + if err != nil { + return err + } + + for id := range mapIDs { + for i := range projectIntegrations { + if id == projectIntegrations[i].ID { + mapIDs[id] = &projectIntegrations[i] + } + } + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := nodesArray[i] + if n.Context != nil && n.Context.ProjectIntegrationID != 0 { + if integ, ok := mapIDs[n.Context.ProjectIntegrationID]; ok { + if integ == nil { + return sdk.WrapError(sdk.ErrNotFound, "unable to find integration %d", n.Context.ProjectIntegrationID) + } + w.ProjectIntegrations[n.Context.ProjectIntegrationID] = *integ + } + } + } + } + + return nil +} + +func (dao WorkflowDAO) withAsCodeUpdateEvents(db gorp.SqlExecutor, ws *[]Workflow) error { + var ids = make([]int64, 0, len(*ws)) + for _, w := range *ws { + ids = append(ids, w.ID) + } + + asCodeEvents, err := ascode.LoadEventsByWorkflowIDs(context.Background(), db, ids) + if err != nil { + return err + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + + for _, evt := range asCodeEvents { + if _, ok := evt.Data.Workflows[w.ID]; ok { + w.AsCodeEvent = append(w.AsCodeEvent, evt) + } + } + } + + return nil +} + +func (dao WorkflowDAO) withApplications(db gorp.SqlExecutor, ws *[]Workflow) error { + var mapIDs = map[int64]*sdk.Application{} + for _, w := range *ws { + nodesArray := w.WorkflowData.Array() + for _, n := range nodesArray { + if n.Context != nil && n.Context.ApplicationID != 0 { + if _, ok := mapIDs[n.Context.ApplicationID]; !ok { + mapIDs[n.Context.ApplicationID] = nil + } + } + } + } + + var ids = make([]int64, 0, len(mapIDs)) + for id := range mapIDs { + ids = append(ids, id) + } + + apps, err := application.LoadAllByIDs(db, ids, application.LoadOptions.WithVariables, application.LoadOptions.WithDeploymentStrategies) + if err != nil { + return err + } + + for id := range mapIDs { + for i := range apps { + if id == apps[i].ID { + mapIDs[id] = &apps[i] + } + } + } + + for x := range *ws { + w := &(*ws)[x] + w.InitMaps() + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := 
nodesArray[i] + if n.Context != nil && n.Context.ApplicationID != 0 { + if app, ok := mapIDs[n.Context.ApplicationID]; ok { + if app == nil { + return sdk.WrapError(sdk.ErrNotFound, "unable to find application %d", n.Context.ApplicationID) + } + w.Applications[n.Context.ApplicationID] = *app + } + } + } + } + + return nil +} + +func (dao WorkflowDAO) withNotifications(db gorp.SqlExecutor, ws *[]Workflow) error { + var ids = make([]int64, 0, len(*ws)) + for _, w := range *ws { + ids = append(ids, w.ID) + } + + notificationsMap, err := LoadNotificationsByWorkflowIDs(db, ids) + if err != nil { + return err + } + + for x := range *ws { + w := &(*ws)[x] + w.Notifications = notificationsMap[w.ID] + log.Debug("workflow %d notifications: %+v", w.ID, w.Notifications) + } + return nil +} + +func (dao WorkflowDAO) withLabels(db gorp.SqlExecutor, ws *[]Workflow) error { + var ids = make([]int64, 0, len(*ws)) + for _, w := range *ws { + ids = append(ids, w.ID) + } + + labels, err := LoadLabels(db, ids...) + if err != nil { + return err + } + + for x := range *ws { + w := &(*ws)[x] + for _, label := range labels { + if w.ID == label.WorkflowID { + w.Labels = append(w.Labels, label) + } + } + } + + return nil +} + +func (dao WorkflowDAO) withFavorites(db gorp.SqlExecutor, ws *[]Workflow, userID string) error { + workflowIDs, err := UserFavoriteWorkflowIDs(db, userID) + if err != nil { + return err + } + + for x := range *ws { + w := &(*ws)[x] + w.Favorite = sdk.IsInInt64Array(w.ID, workflowIDs) + } + + return nil +} + +func (dao WorkflowDAO) Load(ctx context.Context, db gorp.SqlExecutor) (sdk.Workflow, error) { + dao.Limit = 1 + ws, err := dao.LoadAll(ctx, db) + if err != nil { + return sdk.Workflow{}, err + } + if len(ws) == 0 { + return sdk.Workflow{}, sdk.WithStack(sdk.ErrNotFound) + } + return ws[0], nil +} + +func (dao WorkflowDAO) LoadAll(ctx context.Context, db gorp.SqlExecutor) (sdk.Workflows, error) { + t0 := time.Now() + + var workflows []Workflow + if err := gorpmapping.GetAll(ctx, db, dao.Query(), &workflows, dao.GetLoaders()...); err != nil { + return nil, err + } + ws := make(sdk.Workflows, 0, len(workflows)) + for i := range workflows { + if err := workflows[i].PostGet(db); err != nil { + return nil, err + } + w := workflows[i].Get() + w.Normalize() + ws = append(ws, w) + } + + delta := time.Since(t0).Seconds() + log.Info(ctx, "LoadAll - %d results in %.3f seconds", len(ws), delta) + + return ws, nil +} diff --git a/engine/api/workflow/factory_dao_test.go b/engine/api/workflow/factory_dao_test.go new file mode 100644 index 0000000000..3e40c0b4de --- /dev/null +++ b/engine/api/workflow/factory_dao_test.go @@ -0,0 +1,97 @@ +package workflow_test + +import ( + "context" + "fmt" + "testing" + + "github.com/ovh/cds/engine/api/test" + "github.com/ovh/cds/engine/api/workflow" + "github.com/stretchr/testify/require" +) + +func TestLoadAllWorkflows(t *testing.T) { + db, _, end := test.SetupPG(t) + defer end() + + var opts = []workflow.WorkflowDAO{ + {}, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + ProjectKey: "test", + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + WorkflowName: "test", + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + ApplicationRepository: "test", + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + FromRepository: "test", + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + VCSServer: "test", + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + GroupIDs: []int64{1, 2, 3, 4}, + }, + }, 
+ { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + ProjectKey: "test", + WorkflowName: "test", + ApplicationRepository: "test", + VCSServer: "test", + GroupIDs: []int64{1, 2, 3, 4}, + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{ + ProjectKey: "test", + ApplicationRepository: "test", + GroupIDs: []int64{1, 2, 3, 4}, + }, + Loaders: workflow.LoadAllWorkflowsOptionsLoaders{ + WithAsCodeUpdateEvents: true, + WithEnvironments: true, + WithApplications: true, + WithIcon: true, + WithIntegrations: true, + WithPipelines: true, + WithTemplate: true, + }, + }, + { + Filters: workflow.LoadAllWorkflowsOptionsFilters{}, + Loaders: workflow.LoadAllWorkflowsOptionsLoaders{ + WithAsCodeUpdateEvents: true, + WithEnvironments: true, + WithApplications: true, + WithIcon: true, + WithIntegrations: true, + WithPipelines: true, + WithTemplate: true, + }, + }, + } + + for i, opt := range opts { + t.Run(fmt.Sprintf("test LoadAllWorkflows #%d", i), func(t *testing.T) { + wss, err := opt.LoadAll(context.TODO(), db) + for _, ws := range wss { + require.NotEmpty(t, ws.ProjectKey) + } + require.NoError(t, err) + }) + } +} diff --git a/engine/api/workflow/gorp_model.go b/engine/api/workflow/gorp_model.go index bc4223ab6c..d82fbfd136 100644 --- a/engine/api/workflow/gorp_model.go +++ b/engine/api/workflow/gorp_model.go @@ -9,8 +9,21 @@ import ( "github.com/ovh/cds/sdk" ) +type Workflows []Workflow + +func (ws Workflows) Get() sdk.Workflows { + res := make(sdk.Workflows, len(ws)) + for i, w := range ws { + res[i] = w.Get() + } + return res +} + // Workflow is a gorp wrapper around sdk.WorkflowData -type Workflow sdk.Workflow +type Workflow struct { + sdk.Workflow + ProjectKey string `db:"project_key"` +} // Notification is a gorp wrapper around sdk.WorkflowNotification type Notification sdk.WorkflowNotification diff --git a/engine/api/workflow/process_node.go b/engine/api/workflow/process_node.go index b87803242d..6873aeec72 100644 --- a/engine/api/workflow/process_node.go +++ b/engine/api/workflow/process_node.go @@ -257,7 +257,9 @@ func processNode(ctx context.Context, db gorp.SqlExecutor, store cache.Store, pr // CONDITION if !checkCondition(ctx, wr, n.Context.Conditions, nr.BuildParameters) { - log.Debug("Condition failed on processNode %d/%d %+v", wr.ID, n.ID, nr.BuildParameters) + log.Debug("Conditions failed on processNode %d/%d", wr.ID, n.ID) + log.Debug("Conditions was: %+v", n.Context.Conditions) + log.Debug("BuildParameters was: %+v", nr.BuildParameters) return nil, false, nil } diff --git a/engine/api/workflow/resync_workflow.go b/engine/api/workflow/resync_workflow.go index 104eb6770e..f80bc728ed 100644 --- a/engine/api/workflow/resync_workflow.go +++ b/engine/api/workflow/resync_workflow.go @@ -14,7 +14,6 @@ import ( func Resync(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj sdk.Project, wr *sdk.WorkflowRun) error { options := LoadOptions{ DeepPipeline: true, - Base64Keys: true, WithIntegrations: true, } wf, errW := LoadByID(ctx, db, store, proj, wr.Workflow.ID, options) diff --git a/engine/api/workflow/run_workflow.go b/engine/api/workflow/run_workflow.go index e6fc26b0e0..3e22ba02e5 100644 --- a/engine/api/workflow/run_workflow.go +++ b/engine/api/workflow/run_workflow.go @@ -53,7 +53,7 @@ func runFromHook(ctx context.Context, db gorp.SqlExecutor, store cache.Store, pr //If the hook is on the root, it will trigger a new workflow run //Else if will trigger a new subnumber of the last workflow run if h.NodeID == wr.Workflow.WorkflowData.Node.ID { - if err := 
IsValid(ctx, store, db, &wr.Workflow, proj, LoadOptions{DeepPipeline: true}); err != nil { + if err := CompleteWorkflow(ctx, db, &wr.Workflow, proj, LoadOptions{DeepPipeline: true}); err != nil { return nil, sdk.WrapError(err, "Unable to valid workflow") } @@ -178,7 +178,7 @@ func manualRun(ctx context.Context, db gorp.SqlExecutor, store cache.Store, proj ctx, end := observability.Span(ctx, "workflow.ManualRun", observability.Tag(observability.TagWorkflowRun, wr.Number)) defer end() - if err := IsValid(ctx, store, db, &wr.Workflow, proj, LoadOptions{DeepPipeline: true}); err != nil { + if err := CompleteWorkflow(ctx, db, &wr.Workflow, proj, LoadOptions{DeepPipeline: true}); err != nil { return nil, sdk.WrapError(err, "unable to valid workflow") } diff --git a/engine/api/workflow/run_workflow_test.go b/engine/api/workflow/run_workflow_test.go index 6b2ea2a719..230afe2047 100644 --- a/engine/api/workflow/run_workflow_test.go +++ b/engine/api/workflow/run_workflow_test.go @@ -107,6 +107,7 @@ func TestManualRun1(t *testing.T) { w1, err := workflow.Load(context.TODO(), db, cache, *proj, "test_1", workflow.LoadOptions{ DeepPipeline: true, }) + t.Logf("w1: %+v", w1) require.NoError(t, err) wr, errWR := workflow.CreateRun(db, w1, nil, u) diff --git a/engine/api/workflow/workflow_parser.go b/engine/api/workflow/workflow_parser.go index 75cd33b02c..82f6c1d204 100644 --- a/engine/api/workflow/workflow_parser.go +++ b/engine/api/workflow/workflow_parser.go @@ -65,7 +65,7 @@ func ParseAndImport(ctx context.Context, db gorp.SqlExecutor, store cache.Store, // Load deep pipelines if we come from workflow run ( so we have hook uuid ). // We need deep pipelines to be able to run stages/jobs - if err := IsValid(ctx, store, db, w, proj, LoadOptions{DeepPipeline: opts.HookUUID != ""}); err != nil { + if err := CompleteWorkflow(ctx, db, w, proj, LoadOptions{DeepPipeline: opts.HookUUID != ""}); err != nil { // Get spawn infos from error msg, ok := sdk.ErrorToMessage(err) if ok { diff --git a/engine/api/workflow_ascode.go b/engine/api/workflow_ascode.go index 02578e5bdf..b49cb2da04 100644 --- a/engine/api/workflow_ascode.go +++ b/engine/api/workflow_ascode.go @@ -105,7 +105,10 @@ func (api *API) postWorkflowAsCodeHandler() service.Handler { if err := workflow.RenameNode(ctx, api.mustDB(), &wf); err != nil { return err } - if err := workflow.IsValid(ctx, api.Cache, api.mustDB(), &wf, *p, workflow.LoadOptions{DeepPipeline: true}); err != nil { + if err := workflow.CheckValidity(ctx, api.mustDB(), &wf); err != nil { + return err + } + if err := workflow.CompleteWorkflow(ctx, api.mustDB(), &wf, *p, workflow.LoadOptions{DeepPipeline: true}); err != nil { return err } diff --git a/engine/api/workflow_export_test.go b/engine/api/workflow_export_test.go index 4761fb097d..36f9b5eb89 100644 --- a/engine/api/workflow_export_test.go +++ b/engine/api/workflow_export_test.go @@ -166,8 +166,12 @@ func Test_getWorkflowExportHandlerWithPermissions(t *testing.T) { Name: "Test_getWorkflowExportHandlerWithPermissions-Group2", } - require.NoError(t, group.Insert(context.TODO(), api.mustDB(), group2)) - group2, _ = group.LoadByName(context.TODO(), api.mustDB(), "Test_getWorkflowExportHandlerWithPermissions-Group2") + g, _ := group.LoadByName(context.TODO(), api.mustDB(), group2.Name) + if g != nil { + group2 = g + } else { + require.NoError(t, group.Insert(context.TODO(), api.mustDB(), group2)) + } //First pipeline pip := sdk.Pipeline{ diff --git a/engine/api/workflow_import.go b/engine/api/workflow_import.go index 
856d0f4276..bca9ced867 100644 --- a/engine/api/workflow_import.go +++ b/engine/api/workflow_import.go @@ -62,9 +62,12 @@ func (api *API) postWorkflowPreviewHandler() service.Handler { return sdk.WrapError(err, "unable import workflow %s", ew.GetName()) } - // Browse all node to find IDs - if err := workflow.IsValid(ctx, api.Cache, api.mustDB(), wf, *proj, workflow.LoadOptions{}); err != nil { - return sdk.WrapError(err, "workflow is not valid") + if err := workflow.CompleteWorkflow(ctx, api.mustDB(), wf, *proj, workflow.LoadOptions{}); err != nil { + return err + } + + if err := workflow.CheckValidity(ctx, api.mustDB(), wf); err != nil { + return err } if err := workflow.RenameNode(ctx, api.mustDB(), wf); err != nil { diff --git a/engine/api/workflow_run.go b/engine/api/workflow_run.go index b040b02a28..07c4b0f560 100644 --- a/engine/api/workflow_run.go +++ b/engine/api/workflow_run.go @@ -889,7 +889,6 @@ func (api *API) postWorkflowRunHandler() service.Handler { var errWf error wf, errWf = workflow.Load(ctx, api.mustDB(), api.Cache, *p, name, workflow.LoadOptions{ DeepPipeline: true, - Base64Keys: true, WithAsCodeUpdateEvent: true, WithIcon: true, WithIntegrations: true, diff --git a/engine/api/workflow_run_test.go b/engine/api/workflow_run_test.go index 03764606db..5966843a18 100644 --- a/engine/api/workflow_run_test.go +++ b/engine/api/workflow_run_test.go @@ -1150,7 +1150,9 @@ func Test_postWorkflowRunAsyncFailedHandler(t *testing.T) { return writeError(w, err) } default: - return writeError(w, fmt.Errorf("route %s must not be called", r.URL.String())) + return writeError(w, sdk.NewError(sdk.ErrServiceUnavailable, + fmt.Errorf("route %s must not be called", r.URL.String()), + )) } return w, nil }, @@ -1176,6 +1178,8 @@ func Test_postWorkflowRunAsyncFailedHandler(t *testing.T) { x := ascode.UpdateAsCodeResult(context.TODO(), api.mustDB(), api.Cache, *proj, w1.ID, app, ed, u) require.NotNil(t, x, "ascodeEvent should not be nil, but it was") + t.Logf("UpdateAsCodeResult => %+v", x) + //Prepare request vars := map[string]string{ "key": proj.Key, @@ -1204,13 +1208,12 @@ func Test_postWorkflowRunAsyncFailedHandler(t *testing.T) { reqGet := assets.NewAuthentifiedRequest(t, u, pass, "GET", uriGet, nil) recGet := httptest.NewRecorder() router.Mux.ServeHTTP(recGet, reqGet) - + require.Equal(t, 200, recGet.Code) var wrGet sdk.WorkflowRun recGetBody := recGet.Body.Bytes() - assert.NoError(t, json.Unmarshal(recGetBody, &wrGet)) + require.NoError(t, json.Unmarshal(recGetBody, &wrGet)) if sdk.StatusIsTerminated(wrGet.Status) { - t.Logf("%+v", wrGet) assert.Equal(t, sdk.StatusFail, wrGet.Status) assert.Equal(t, 1, len(wrGet.Infos)) if len(wrGet.Infos) == 1 { diff --git a/engine/api/workflow_test.go b/engine/api/workflow_test.go index 9a982cc686..2d54250ce3 100644 --- a/engine/api/workflow_test.go +++ b/engine/api/workflow_test.go @@ -85,6 +85,7 @@ func Test_getWorkflowsHandler(t *testing.T) { wfList := []sdk.Workflow{} test.NoError(t, json.Unmarshal(w.Body.Bytes(), &wfList)) + require.Len(t, wfList, 1) for _, w := range wfList { assert.Equal(t, true, w.Permissions.Readable, "readable should be true") assert.Equal(t, true, w.Permissions.Writable, "writable should be true") @@ -1171,7 +1172,7 @@ func Test_postWorkflowRollbackHandler(t *testing.T) { assert.NotEmpty(t, payload["git.branch"], "git.branch should not be empty") - test.NoError(t, workflow.IsValid(context.Background(), api.Cache, db, wf, *proj, workflow.LoadOptions{})) + test.NoError(t, workflow.CompleteWorkflow(context.Background(), db, wf, 
*proj, workflow.LoadOptions{})) eWf, err := exportentities.NewWorkflow(context.TODO(), *wf) test.NoError(t, err) @@ -1226,7 +1227,6 @@ func Test_postWorkflowRollbackHandler(t *testing.T) { } func Test_postAndDeleteWorkflowLabelHandler(t *testing.T) { - api, db, router, end := newTestAPI(t) defer end() @@ -1341,11 +1341,11 @@ func Test_postAndDeleteWorkflowLabelHandler(t *testing.T) { wfUpdated, errW := workflow.Load(context.TODO(), db, api.Cache, *proj, wf.Name, workflow.LoadOptions{WithLabels: true}) test.NoError(t, errW) - assert.NotNil(t, wfUpdated.Labels) - assert.Equal(t, 1, len(wfUpdated.Labels)) - assert.Equal(t, lbl1.Name, wfUpdated.Labels[0].Name) + require.NotNil(t, wfUpdated.Labels) + require.Equal(t, 1, len(wfUpdated.Labels)) + require.Equal(t, lbl1.Name, wfUpdated.Labels[0].Name) - //Unlink label + // Unlink label vars = map[string]string{ "key": proj.Key, "permWorkflowName": name, @@ -1361,9 +1361,8 @@ func Test_postAndDeleteWorkflowLabelHandler(t *testing.T) { assert.Equal(t, 200, w.Code) wfUpdated, errW = workflow.Load(context.TODO(), db, api.Cache, *proj, wf.Name, workflow.LoadOptions{WithLabels: true}) - test.NoError(t, errW) - assert.NotNil(t, wfUpdated.Labels) - assert.Equal(t, 0, len(wfUpdated.Labels)) + require.NoError(t, errW) + require.Equal(t, 0, len(wfUpdated.Labels)) } func Test_deleteWorkflowHandler(t *testing.T) { @@ -1909,3 +1908,86 @@ func Test_getWorkflowsHandler_FilterByRepo(t *testing.T) { require.Equal(t, pip.ID, wfs[0].WorkflowData.Node.Context.PipelineID) } + +func Test_getSearchWorkflowHandler(t *testing.T) { + api, tsURL, tsClose := newTestServer(t) + defer tsClose() + + admin, _ := assets.InsertAdminUser(t, api.mustDB()) + localConsumer, err := authentication.LoadConsumerByTypeAndUserID(context.TODO(), api.mustDB(), sdk.ConsumerLocal, admin.ID, authentication.LoadConsumerOptions.WithAuthentifiedUser) + require.NoError(t, err) + + _, jws, err := builtin.NewConsumer(context.TODO(), api.mustDB(), sdk.RandomString(10), sdk.RandomString(10), localConsumer, admin.GetGroupIDs(), + sdk.NewAuthConsumerScopeDetails(sdk.AuthConsumerScopeProject)) + + u, _ := assets.InsertLambdaUser(t, api.mustDB()) + + pkey := sdk.RandomString(10) + proj := assets.InsertTestProject(t, api.mustDB(), api.Cache, pkey, pkey) + require.NoError(t, group.InsertLinkGroupUser(context.TODO(), api.mustDB(), &group.LinkGroupUser{ + GroupID: proj.ProjectGroups[0].Group.ID, + AuthentifiedUserID: u.ID, + Admin: true, + })) + + repofullName := sdk.RandomString(20) + + app := &sdk.Application{ + Name: sdk.RandomString(10), + RepositoryFullname: "ovh/" + repofullName, + } + require.NoError(t, application.Insert(api.mustDB(), *proj, app)) + + pip := sdk.Pipeline{ + ProjectID: proj.ID, + ProjectKey: proj.Key, + Name: "pip1", + } + test.NoError(t, pipeline.InsertPipeline(api.mustDB(), &pip)) + + wf := sdk.Workflow{ + Name: "workflow1", + ProjectID: proj.ID, + ProjectKey: proj.Key, + WorkflowData: sdk.WorkflowData{ + Node: sdk.Node{ + Name: "root", + Context: &sdk.NodeContext{ + PipelineID: pip.ID, + ApplicationID: app.ID, + }, + }, + }, + } + test.NoError(t, workflow.Insert(context.TODO(), api.mustDB(), api.Cache, *proj, &wf)) + + wf2 := sdk.Workflow{ + Name: "workflow2", + ProjectID: proj.ID, + ProjectKey: proj.Key, + WorkflowData: sdk.WorkflowData{ + Node: sdk.Node{ + Name: "root", + Context: &sdk.NodeContext{ + PipelineID: pip.ID, + }, + }, + }, + } + test.NoError(t, workflow.Insert(context.TODO(), api.mustDB(), api.Cache, *proj, &wf2)) + + // Call with an admin + sdkclientAdmin := 
cdsclient.New(cdsclient.Config{ + Host: tsURL, + BuitinConsumerAuthenticationToken: jws, + }) + + wfs, err := sdkclientAdmin.WorkflowSearch(cdsclient.WithQueryParameter("repository", "ovh/"+repofullName)) + require.NoError(t, err) + require.Len(t, wfs, 1) + require.Equal(t, wf.Name, wfs[0].Name) + require.NotEmpty(t, wfs[0].URLs.APIURL) + require.NotEmpty(t, wfs[0].URLs.UIURL) + require.Equal(t, app.ID, wfs[0].WorkflowData.Node.Context.ApplicationID) + require.Equal(t, pip.ID, wfs[0].WorkflowData.Node.Context.PipelineID) +} diff --git a/engine/api/workflowtemplate/dao_instance.go b/engine/api/workflowtemplate/dao_instance.go index 643015a314..84b4c7cfc6 100644 --- a/engine/api/workflowtemplate/dao_instance.go +++ b/engine/api/workflowtemplate/dao_instance.go @@ -4,6 +4,8 @@ import ( "context" "strings" + "github.com/lib/pq" + "github.com/go-gorp/gorp" "github.com/ovh/cds/engine/api/database/gorpmapping" @@ -94,6 +96,16 @@ func LoadInstancesByWorkflowIDs(ctx context.Context, db gorp.SqlExecutor, workfl return getInstances(ctx, db, query, opts...) } +// LoadInstanceByWorkflowIDs returns a workflow template instance by workflow ids. +func LoadInstanceByWorkflowIDs(ctx context.Context, db gorp.SqlExecutor, workflowID []int64, opts ...LoadInstanceOptionFunc) ([]sdk.WorkflowTemplateInstance, error) { + query := gorpmapping.NewQuery(` + SELECT * + FROM workflow_template_instance + WHERE workflow_id = ANY($1) + `).Args(pq.Int64Array(workflowID)) + return getInstances(ctx, db, query, opts...) +} + // LoadInstanceByWorkflowID returns a workflow template instance by workflow id. func LoadInstanceByWorkflowID(ctx context.Context, db gorp.SqlExecutor, workflowID int64, opts ...LoadInstanceOptionFunc) (*sdk.WorkflowTemplateInstance, error) { query := gorpmapping.NewQuery(` diff --git a/go.mod b/go.mod index 167e985439..f60af72d70 100644 --- a/go.mod +++ b/go.mod @@ -180,6 +180,7 @@ require ( github.com/xanzy/go-gitlab v0.15.0 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/yesnault/go-toml v0.0.0-20191205182532-f5ef6cee7945 + github.com/yesnault/gorp v2.0.0+incompatible // indirect github.com/yuin/gluare v0.0.0-20170607022532-d7c94f1a80ed github.com/yuin/gopher-lua v0.0.0-20170901023928-8c2befcd3908 github.com/ziutek/mymysql v1.5.4 // indirect diff --git a/go.sum b/go.sum index 7ebd74366f..47b7572174 100644 --- a/go.sum +++ b/go.sum @@ -563,6 +563,8 @@ github.com/yesnault/go-keychain v0.0.0-20190829085436-f78f7ae28786 h1:3i+IgAiigX github.com/yesnault/go-keychain v0.0.0-20190829085436-f78f7ae28786/go.mod h1:EzotOW21xnO2Lld4dB8qr2bSnzi3nGtE9FG33n/y2Q8= github.com/yesnault/go-toml v0.0.0-20191205182532-f5ef6cee7945 h1:icS0gqYJLvPFpni3gxPml7gCSdCcJx72RrbkVgmvw80= github.com/yesnault/go-toml v0.0.0-20191205182532-f5ef6cee7945/go.mod h1:SsMrIuedaKvK8GekjwjVv8gMnHNAuvoINpKCARzdsEQ= +github.com/yesnault/gorp v2.0.0+incompatible h1:4pwtvWrXQTAgGVnzAB0X5jX/mWRQEa8zmcdNT5U8jyg= +github.com/yesnault/gorp v2.0.0+incompatible/go.mod h1:7nhqtxBPZoPXx86SqUzP/OFLd8prnhkydPk51uJthQc= github.com/yesnault/gorp v2.0.1-0.20200325154225-2dc6d8c2da37+incompatible h1:8M+L4IKModOdc0ZsH0kHJiqZ7CmWF4yH8yy4InZxKyY= github.com/yesnault/gorp v2.0.1-0.20200325154225-2dc6d8c2da37+incompatible/go.mod h1:7nhqtxBPZoPXx86SqUzP/OFLd8prnhkydPk51uJthQc= github.com/yuin/gluare v0.0.0-20170607022532-d7c94f1a80ed h1:I1vcLHWU9m30rA90rMrKPu0eD3NDA4FBlkB8WMaDyUw= diff --git a/sdk/cdsclient/client_workflow.go b/sdk/cdsclient/client_workflow.go index 995c7ca67f..2151ad9a6d 100644 --- a/sdk/cdsclient/client_workflow.go +++ 
b/sdk/cdsclient/client_workflow.go @@ -15,6 +15,15 @@ import ( "github.com/ovh/cds/sdk" ) +func (c *client) WorkflowSearch(opts ...RequestModifier) ([]sdk.Workflow, error) { + url := fmt.Sprintf("/workflow/search") + w := []sdk.Workflow{} + if _, err := c.GetJSON(context.Background(), url, &w, opts...); err != nil { + return nil, err + } + return w, nil +} + func (c *client) WorkflowList(projectKey string, opts ...RequestModifier) ([]sdk.Workflow, error) { url := fmt.Sprintf("/project/%s/workflows", projectKey) w := []sdk.Workflow{} diff --git a/sdk/cdsclient/interface.go b/sdk/cdsclient/interface.go index c12d2e5d82..fdd47dafe2 100644 --- a/sdk/cdsclient/interface.go +++ b/sdk/cdsclient/interface.go @@ -294,6 +294,7 @@ type HookClient interface { // WorkflowClient exposes workflows functions type WorkflowClient interface { + WorkflowSearch(opts ...RequestModifier) ([]sdk.Workflow, error) WorkflowList(projectKey string, opts ...RequestModifier) ([]sdk.Workflow, error) WorkflowGet(projectKey, name string, opts ...RequestModifier) (*sdk.Workflow, error) WorkflowUpdate(projectKey, name string, wf *sdk.Workflow) error diff --git a/sdk/cdsclient/mock_cdsclient/interface_mock.go b/sdk/cdsclient/mock_cdsclient/interface_mock.go index c329318570..899437cc75 100644 --- a/sdk/cdsclient/mock_cdsclient/interface_mock.go +++ b/sdk/cdsclient/mock_cdsclient/interface_mock.go @@ -12,7 +12,7 @@ import ( sdk "github.com/ovh/cds/sdk" cdsclient "github.com/ovh/cds/sdk/cdsclient" venom "github.com/ovh/venom" - coverage "github.com/sguiheux/go-coverage" + go_coverage "github.com/sguiheux/go-coverage" io "io" http "net/http" reflect "reflect" @@ -3010,7 +3010,7 @@ func (mr *MockQueueClientMockRecorder) QueueJobSendSpawnInfo(ctx, id, in interfa } // QueueSendCoverage mocks base method -func (m *MockQueueClient) QueueSendCoverage(ctx context.Context, id int64, report coverage.Report) error { +func (m *MockQueueClient) QueueSendCoverage(ctx context.Context, id int64, report go_coverage.Report) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "QueueSendCoverage", ctx, id, report) ret0, _ := ret[0].(error) @@ -3556,6 +3556,25 @@ func (m *MockWorkflowClient) EXPECT() *MockWorkflowClientMockRecorder { return m.recorder } +// WorkflowSearch mocks base method +func (m *MockWorkflowClient) WorkflowSearch(opts ...cdsclient.RequestModifier) ([]sdk.Workflow, error) { + m.ctrl.T.Helper() + varargs := []interface{}{} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "WorkflowSearch", varargs...) + ret0, _ := ret[0].([]sdk.Workflow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// WorkflowSearch indicates an expected call of WorkflowSearch +func (mr *MockWorkflowClientMockRecorder) WorkflowSearch(opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WorkflowSearch", reflect.TypeOf((*MockWorkflowClient)(nil).WorkflowSearch), opts...) 
+} + // WorkflowList mocks base method func (m *MockWorkflowClient) WorkflowList(projectKey string, opts ...cdsclient.RequestModifier) ([]sdk.Workflow, error) { m.ctrl.T.Helper() @@ -6712,7 +6731,7 @@ func (mr *MockInterfaceMockRecorder) QueueJobSendSpawnInfo(ctx, id, in interface } // QueueSendCoverage mocks base method -func (m *MockInterface) QueueSendCoverage(ctx context.Context, id int64, report coverage.Report) error { +func (m *MockInterface) QueueSendCoverage(ctx context.Context, id int64, report go_coverage.Report) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "QueueSendCoverage", ctx, id, report) ret0, _ := ret[0].(error) @@ -7209,6 +7228,25 @@ func (mr *MockInterfaceMockRecorder) WorkerSetStatus(ctx, status interface{}) *g return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WorkerSetStatus", reflect.TypeOf((*MockInterface)(nil).WorkerSetStatus), ctx, status) } +// WorkflowSearch mocks base method +func (m *MockInterface) WorkflowSearch(opts ...cdsclient.RequestModifier) ([]sdk.Workflow, error) { + m.ctrl.T.Helper() + varargs := []interface{}{} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "WorkflowSearch", varargs...) + ret0, _ := ret[0].([]sdk.Workflow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// WorkflowSearch indicates an expected call of WorkflowSearch +func (mr *MockInterfaceMockRecorder) WorkflowSearch(opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WorkflowSearch", reflect.TypeOf((*MockInterface)(nil).WorkflowSearch), opts...) +} + // WorkflowList mocks base method func (m *MockInterface) WorkflowList(projectKey string, opts ...cdsclient.RequestModifier) ([]sdk.Workflow, error) { m.ctrl.T.Helper() @@ -8190,7 +8228,7 @@ func (mr *MockWorkerInterfaceMockRecorder) QueueJobSendSpawnInfo(ctx, id, in int } // QueueSendCoverage mocks base method -func (m *MockWorkerInterface) QueueSendCoverage(ctx context.Context, id int64, report coverage.Report) error { +func (m *MockWorkerInterface) QueueSendCoverage(ctx context.Context, id int64, report go_coverage.Report) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "QueueSendCoverage", ctx, id, report) ret0, _ := ret[0].(error) diff --git a/sdk/notif.go b/sdk/notif.go index 077e0e3918..5f857b8571 100644 --- a/sdk/notif.go +++ b/sdk/notif.go @@ -2,11 +2,15 @@ package sdk import ( "bytes" + "database/sql/driver" + "encoding/json" + "fmt" "text/template" "time" - "github.com/ovh/cds/sdk/interpolate" "github.com/ovh/venom" + + "github.com/ovh/cds/sdk/interpolate" ) //const @@ -44,6 +48,24 @@ type UserNotificationSettings struct { Conditions WorkflowNodeConditions `json:"conditions,omitempty" yaml:"conditions,omitempty"` } +// Value returns driver.Value from Metadata. +func (a UserNotificationSettings) Value() (driver.Value, error) { + j, err := json.Marshal(a) + return j, WrapError(err, "cannot marshal UserNotificationSettings") +} + +// Scan UserNotificationSettings. 
+func (a *UserNotificationSettings) Scan(src interface{}) error { + if src == nil { + return nil + } + source, ok := src.([]byte) + if !ok { + return WithStack(fmt.Errorf("type assertion .([]byte) failed (%T)", src)) + } + return WrapError(json.Unmarshal(source, a), "cannot unmarshal UserNotificationSettings") +} + // UserNotificationTemplate is the notification content type UserNotificationTemplate struct { Subject string `json:"subject,omitempty" yaml:"subject,omitempty"` diff --git a/sdk/usage.go b/sdk/usage.go index d6cd24f833..a1e16f97d4 100644 --- a/sdk/usage.go +++ b/sdk/usage.go @@ -2,8 +2,8 @@ package sdk // Usage is type to represent usage of each type type Usage struct { - Workflows []Workflow `json:"workflows,omitempty"` - Environments []Environment `json:"environments,omitempty"` - Pipelines []Pipeline `json:"pipelines,omitempty"` - Applications []Application `json:"applications,omitempty"` + Workflows []WorkflowName `json:"workflows,omitempty"` + Environments []Environment `json:"environments,omitempty"` + Pipelines []Pipeline `json:"pipelines,omitempty"` + Applications []Application `json:"applications,omitempty"` } diff --git a/sdk/workflow.go b/sdk/workflow.go index c4db276c1c..3eddb57d40 100644 --- a/sdk/workflow.go +++ b/sdk/workflow.go @@ -19,6 +19,13 @@ const ( // ColorRegexp represent the regexp for a format to hexadecimal color var ColorRegexp = regexp.MustCompile(`^#\w{3,8}$`) +type WorkflowName struct { + ID int64 `json:"id" db:"id" cli:"-"` + Name string `json:"name" db:"name" cli:"name,key"` + ProjectKey string `json:"project_key" db:"project_key" cli:"project_key"` + ProjectID int64 `json:"project_id" db:"project_id" cli:"-"` +} + //Workflow represents a pipeline based workflow type Workflow struct { ID int64 `json:"id" db:"id" cli:"-"` @@ -30,10 +37,10 @@ type Workflow struct { ProjectKey string `json:"project_key" db:"-" cli:"-"` Groups []GroupPermission `json:"groups,omitempty" db:"-" cli:"-"` Permissions Permissions `json:"permissions" db:"-" cli:"-"` - Metadata Metadata `json:"metadata,omitempty" yaml:"metadata" db:"-"` + Metadata Metadata `json:"metadata,omitempty" yaml:"metadata" db:"metadata"` Usage *Usage `json:"usage,omitempty" db:"-" cli:"-"` HistoryLength int64 `json:"history_length" db:"history_length" cli:"-"` - PurgeTags []string `json:"purge_tags,omitempty" db:"-" cli:"-"` + PurgeTags PurgeTags `json:"purge_tags,omitempty" db:"purge_tags" cli:"-"` Notifications []WorkflowNotification `json:"notifications,omitempty" db:"-" cli:"-"` FromRepository string `json:"from_repository,omitempty" db:"from_repository" cli:"from"` DerivedFromWorkflowID int64 `json:"derived_from_workflow_id,omitempty" db:"derived_from_workflow_id" cli:"-"` @@ -49,7 +56,7 @@ type Workflow struct { Labels []Label `json:"labels,omitempty" db:"-" cli:"labels"` ToDelete bool `json:"to_delete" db:"to_delete" cli:"-"` Favorite bool `json:"favorite" db:"-" cli:"favorite"` - WorkflowData WorkflowData `json:"workflow_data" db:"-" cli:"-"` + WorkflowData WorkflowData `json:"workflow_data" db:"workflow_data" cli:"-"` EventIntegrations []ProjectIntegration `json:"event_integrations,omitempty" db:"-" cli:"-"` AsCodeEvent []AsCodeEvent `json:"as_code_events,omitempty" db:"-" cli:"-"` // aggregates @@ -59,8 +66,54 @@ type Workflow struct { URLs URL `json:"urls" yaml:"-" db:"-" cli:"-"` } +type PurgeTags []string + +// Value returns driver.Value from PurgeTags. 
+func (a PurgeTags) Value() (driver.Value, error) { + j, err := json.Marshal(a) + return j, WrapError(err, "cannot marshal PurgeTags") +} + +// Scan PurgeTags. +func (a *PurgeTags) Scan(src interface{}) error { + if src == nil { + return nil + } + source, ok := src.([]byte) + if !ok { + return WithStack(fmt.Errorf("type assertion .([]byte) failed (%T)", src)) + } + return WrapError(json.Unmarshal(source, a), "cannot unmarshal PurgeTags") +} + +// Value returns driver.Value from WorkflowData. +func (a WorkflowData) Value() (driver.Value, error) { + j, err := json.Marshal(a) + return j, WrapError(err, "cannot marshal WorkflowData") +} + +// Scan WorkflowData. +func (a *WorkflowData) Scan(src interface{}) error { + if src == nil { + return nil + } + source, ok := src.([]byte) + if !ok { + return WithStack(fmt.Errorf("type assertion .([]byte) failed (%T)", src)) + } + return WrapError(json.Unmarshal(source, a), "cannot unmarshal WorkflowData") +} + type Workflows []Workflow +func (workflows Workflows) IDs() []int64 { + var res = make([]int64, len(workflows)) + for i := range workflows { + res[i] = workflows[i].ID + } + return res +} + func (workflows Workflows) Names() []string { var res = make([]string, len(workflows)) for i := range workflows { @@ -79,6 +132,27 @@ func (workflows Workflows) Filter(f func(w Workflow) bool) Workflows { return res } +func (w *Workflow) InitMaps() { + if w.Pipelines == nil { + w.Pipelines = make(map[int64]Pipeline) + } + if w.Applications == nil { + w.Applications = make(map[int64]Application) + } + if w.Environments == nil { + w.Environments = make(map[int64]Environment) + } + if w.ProjectIntegrations == nil { + w.ProjectIntegrations = make(map[int64]ProjectIntegration) + } + if w.HookModels == nil { + w.HookModels = make(map[int64]WorkflowHookModel) + } + if w.OutGoingHookModels == nil { + w.OutGoingHookModels = make(map[int64]WorkflowHookModel) + } +} + // GetApplication retrieve application from workflow func (w *Workflow) GetApplication(ID int64) Application { return w.Applications[ID] @@ -153,6 +227,70 @@ func (w *Workflow) SortNode() { } } + +// Normalize fills empty node types and resolves pipeline, application, environment, integration and hook model names from the workflow maps +// This function should be called after the maps have been completed +func (w *Workflow) Normalize() { + w.InitMaps() + w.AssignEmptyType() + + nodesArray := w.WorkflowData.Array() + for i := range nodesArray { + n := nodesArray[i] + if n.Context == nil { + continue + } + + pip, ok := w.Pipelines[n.Context.PipelineID] + if ok { + n.Context.PipelineName = pip.Name + } + + app, ok := w.Applications[n.Context.ApplicationID] + if ok { + n.Context.ApplicationName = app.Name + } + + env, ok := w.Environments[n.Context.EnvironmentID] + if ok { + n.Context.EnvironmentName = env.Name + } + + integ, ok := w.ProjectIntegrations[n.Context.ProjectIntegrationID] + if ok { + n.Context.ProjectIntegrationName = integ.Name + } + + for i := range n.Hooks { + h := &n.Hooks[i] + hookModel, ok := w.HookModels[h.HookModelID] + if ok { + h.HookModelName = hookModel.Name + } + } + } + + // Set the node names in the notifications + for i := range w.Notifications { + n := &w.Notifications[i] + if len(n.NodeIDs) != 0 { + n.SourceNodeRefs = nil + for _, id := range n.NodeIDs { + notifNode := w.WorkflowData.NodeByID(id) + if notifNode != nil { + n.SourceNodeRefs = append(n.SourceNodeRefs, notifNode.Name) +
} + } + } + } +} + // AssignEmptyType fill node type field func (w *Workflow) AssignEmptyType() { // set node type for join diff --git a/tests/ITSCWRKFLW2-WORKFLOW.yml b/tests/ITSCWRKFLW2-WORKFLOW.yml index 71b6ead280..026dc3f1bd 100644 --- a/tests/ITSCWRKFLW2-WORKFLOW.yml +++ b/tests/ITSCWRKFLW2-WORKFLOW.yml @@ -1,5 +1,7 @@ name: ITSCWRKFLW2-WORKFLOW -version: v1.0 -pipeline: ITSCWRKFLW2-PIPELINE -application: ITSCWRKFLW2-ANOTHER-APPLICATION +version: v2.0 +workflow: + ITSCWRKFLW2-PIPELINE: + pipeline: ITSCWRKFLW2-PIPELINE + application: ITSCWRKFLW2-ANOTHER-APPLICATION