feat(api): worker model docker image whitelist #6841

Merged · 3 commits · Feb 19, 2024
Changes from 1 commit
40 changes: 29 additions & 11 deletions engine/api/api.go
@@ -10,6 +10,7 @@ import (
"net/url"
"os"
"os/signal"
"regexp"
"runtime/pprof"
"strings"
"time"
@@ -63,6 +64,7 @@ import (
"github.com/ovh/cds/sdk"
"github.com/ovh/cds/sdk/cdsclient"
"github.com/ovh/cds/sdk/jws"
cdslog "github.com/ovh/cds/sdk/log"
)

// Configuration is the configuration structure for CDS API
@@ -234,13 +236,14 @@ type Configuration struct {
Error string `toml:"error" comment:"Help displayed to user on each error. Warning: this message could be view by anonymous user. Markdown accepted." json:"error" default:""`
} `toml:"help" comment:"######################\n 'Help' informations \n######################" json:"help"`
Workflow struct {
MaxRuns int64 `toml:"maxRuns" comment:"Maximum of runs by workflow" json:"maxRuns" default:"255"`
DefaultRetentionPolicy string `toml:"defaultRetentionPolicy" comment:"Default rule for workflow run retention policy, this rule can be overridden on each workflow.\n Example: 'return run_days_before < 365' keeps runs for one year." json:"defaultRetentionPolicy" default:"return run_days_before < 365"`
DisablePurgeDeletion bool `toml:"disablePurgeDeletion" comment:"Allow you to disable the deletion part of the purge. Workflow run will only be marked as delete" json:"disablePurgeDeletion" default:"false"`
TemplateBulkRunnerCount int64 `toml:"templateBulkRunnerCount" comment:"The count of runner that will execute the workflow template bulk operation." json:"templateBulkRunnerCount" default:"10"`
JobDefaultRegion string `toml:"jobDefaultRegion" comment:"The default region where the job will be sent if no one is defined on a job" json:"jobDefaultRegion"`
JobDefaultBookDelay int64 `toml:"jobDefaultBookDelay" comment:"The default book delay for a job in queue (in seconds)" json:"jobDefaultBookDelay" default:"120"`
CustomServiceJobBookDelay map[string]int64 `toml:"customServiceJobBookDelay" comment:"Set custom job book delay for given CDS Hatchery (in seconds)" json:"customServiceJobBookDelay" commented:"true"`
MaxRuns int64 `toml:"maxRuns" comment:"Maximum of runs by workflow" json:"maxRuns" default:"255"`
DefaultRetentionPolicy string `toml:"defaultRetentionPolicy" comment:"Default rule for workflow run retention policy, this rule can be overridden on each workflow.\n Example: 'return run_days_before < 365' keeps runs for one year." json:"defaultRetentionPolicy" default:"return run_days_before < 365"`
DisablePurgeDeletion bool `toml:"disablePurgeDeletion" comment:"Allow you to disable the deletion part of the purge. Workflow run will only be marked as delete" json:"disablePurgeDeletion" default:"false"`
TemplateBulkRunnerCount int64 `toml:"templateBulkRunnerCount" comment:"The count of runner that will execute the workflow template bulk operation." json:"templateBulkRunnerCount" default:"10"`
JobDefaultRegion string `toml:"jobDefaultRegion" comment:"The default region where the job will be sent if no one is defined on a job" json:"jobDefaultRegion"`
JobDefaultBookDelay int64 `toml:"jobDefaultBookDelay" comment:"The default book delay for a job in queue (in seconds)" json:"jobDefaultBookDelay" default:"120"`
CustomServiceJobBookDelay map[string]int64 `toml:"customServiceJobBookDelay" comment:"Set custom job book delay for given CDS Hatchery (in seconds)" json:"customServiceJobBookDelay" commented:"true"`
WorkerModelDockerImageWhiteList []string `toml:"workerModelDockerImageWhiteList" comment:"Whitelist of docker images allowed for worker models" json:"workerModelDockerImageWhiteList" commented:"true"`
} `toml:"workflow" comment:"######################\n 'Workflow' global configuration \n######################" json:"workflow"`
WorkflowV2 struct {
JobSchedulingTimeout int64 `toml:"jobSchedulingTimeout" comment:"Timeout delay for job scheduling (in seconds)" json:"jobSchedulingTimeout" default:"600"`
@@ -332,10 +335,11 @@ type API struct {
RunResultSynchronized *stats.Int64Measure
RunResultSynchronizedError *stats.Int64Measure
}
workflowRunCraftChan chan string
workflowRunTriggerChan chan sdk.V2WorkflowRunEnqueue
AuthenticationDrivers map[sdk.AuthConsumerType]sdk.AuthDriver
LinkDrivers map[sdk.AuthConsumerType]link.LinkDriver
workflowRunCraftChan chan string
workflowRunTriggerChan chan sdk.V2WorkflowRunEnqueue
AuthenticationDrivers map[sdk.AuthConsumerType]sdk.AuthDriver
LinkDrivers map[sdk.AuthConsumerType]link.LinkDriver
WorkerModelDockerImageWhiteList []regexp.Regexp
}

// ApplyConfiguration apply an object of type api.Configuration after checking it
@@ -486,6 +490,9 @@ type StartupConfigConsumer struct {
// Serve will start the http api server
func (a *API) Serve(ctx context.Context) error {

// Skip this verbose log
log.Skip(cdslog.Handler, "api.(*API).postServiceHearbeatHandler-fm.(*API).postServiceHearbeatHandler")

log.Info(ctx, "Starting CDS API Server %s", sdk.VERSION)

a.StartupTime = time.Now()
@@ -614,6 +621,17 @@ func (a *API) Serve(ctx context.Context) error {
return sdk.WrapError(err, "cannot connect to cache store")
}

// Manage worker model docker image whitelist
if len(a.Config.Workflow.WorkerModelDockerImageWhiteList) > 0 {
for _, s := range a.Config.Workflow.WorkerModelDockerImageWhiteList {
r, err := regexp.Compile(s)
if err != nil {
return sdk.WrapError(err, "wront WorkerModelDockerImageWhiteList regexp %q", s)
}
a.WorkerModelDockerImageWhiteList = append(a.WorkerModelDockerImageWhiteList, *r)
}
}

a.GoRoutines = sdk.NewGoRoutines(ctx)

log.Info(ctx, "Running migration")
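For reference, the new `workerModelDockerImageWhiteList` entries are plain Go regular expressions compiled once when the API starts. A minimal sketch of the behaviour; the patterns and image name below are illustrative and not taken from this PR:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Illustrative values for the workerModelDockerImageWhiteList setting.
	whiteList := []string{`^registry\.mycorp\.local/.*`, `^buildpack-deps:.*`}

	// Each entry is compiled up front; a bad pattern aborts the API start.
	var compiled []regexp.Regexp
	for _, s := range whiteList {
		r, err := regexp.Compile(s)
		if err != nil {
			panic(fmt.Sprintf("wrong workerModelDockerImageWhiteList regexp %q: %v", s, err))
		}
		compiled = append(compiled, *r)
	}
	fmt.Println(compiled[1].MatchString("buildpack-deps:jessie")) // true
}
```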
84 changes: 81 additions & 3 deletions engine/api/v2_repository_analyze.go
@@ -6,18 +6,21 @@ import (
"compress/gzip"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path/filepath"
"regexp"
"sort"
"strings"
"time"

"github.com/go-gorp/gorp"
"github.com/gorilla/mux"
"github.com/rockbears/log"
"github.com/rockbears/yaml"
"go.opencensus.io/trace"

"github.com/ovh/cds/engine/api/database/gorpmapping"
@@ -990,13 +993,13 @@ func (api *API) handleEntitiesFiles(_ context.Context, filesContent map[string][
switch {
case strings.HasPrefix(filePath, ".cds/worker-models/"):
var wms []sdk.V2WorkerModel
es, err = sdk.ReadEntityFile(dir, fileName, content, &wms, sdk.EntityTypeWorkerModel, *analysis)
es, err = ReadEntityFile(api, dir, fileName, content, &wms, sdk.EntityTypeWorkerModel, *analysis)
case strings.HasPrefix(filePath, ".cds/actions/"):
var actions []sdk.V2Action
es, err = sdk.ReadEntityFile(dir, fileName, content, &actions, sdk.EntityTypeAction, *analysis)
es, err = ReadEntityFile(api, dir, fileName, content, &actions, sdk.EntityTypeAction, *analysis)
case strings.HasPrefix(filePath, ".cds/workflows/"):
var w []sdk.V2Workflow
es, err = sdk.ReadEntityFile(dir, fileName, content, &w, sdk.EntityTypeWorkflow, *analysis)
es, err = ReadEntityFile(api, dir, fileName, content, &w, sdk.EntityTypeWorkflow, *analysis)
default:
continue
}
@@ -1013,6 +1016,81 @@

}

func Lint[T sdk.Lintable](api *API, o T) []error {
// 1. Static lint
if err := o.Lint(); err != nil {
return err
}
// 2. Lint against some API-specific rules
switch x := any(o).(type) {

case sdk.V2WorkerModel:
// 2.1 Validate docker image against the whitelist from API configuration
var dockerSpec sdk.V2WorkerModelDockerSpec
if err := json.Unmarshal(x.Spec, &dockerSpec); err != nil {
// Only check the docker spec, so we skip other errors
break
}
// Verify the image if a whitelist is set up
if dockerSpec.Image != "" && len(api.WorkerModelDockerImageWhiteList) > 0 {
var allowedImage = false
for _, r := range api.WorkerModelDockerImageWhiteList { // At least one regexp must match
if r.MatchString(dockerSpec.Image) {
allowedImage = true
break
}
}
if !allowedImage {
return []error{sdk.WithStack(sdk.ErrWrongRequest)}
}
}
}
return nil
}

func ReadEntityFile[T sdk.Lintable](api *API, directory, fileName string, content []byte, out *[]T, t string, analysis sdk.ProjectRepositoryAnalysis) ([]sdk.EntityWithObject, []error) {
namePattern, err := regexp.Compile(sdk.EntityNamePattern)
if err != nil {
return nil, []error{sdk.WrapError(err, "unable to compile regexp %s", sdk.EntityNamePattern)}
}

if err := yaml.UnmarshalMultipleDocuments(content, out); err != nil {
return nil, []error{sdk.NewErrorFrom(sdk.ErrInvalidData, "unable to read %s%s: %v", directory, fileName, err)}
}
var entities []sdk.EntityWithObject
for _, o := range *out {
if err := Lint(api, o); err != nil {
return nil, err
}
eo := sdk.EntityWithObject{
Entity: sdk.Entity{
Data: string(content),
Name: o.GetName(),
Ref: analysis.Ref,
Commit: analysis.Commit,
ProjectKey: analysis.ProjectKey,
ProjectRepositoryID: analysis.ProjectRepositoryID,
Type: t,
FilePath: directory + fileName,
},
}
if !namePattern.MatchString(o.GetName()) {
return nil, []error{sdk.WrapError(sdk.ErrInvalidData, "name %s doesn't match %s", o.GetName(), sdk.EntityNamePattern)}
}
switch t {
case sdk.EntityTypeWorkerModel:
eo.Model = any(o).(sdk.V2WorkerModel)
case sdk.EntityTypeAction:
eo.Action = any(o).(sdk.V2Action)
case sdk.EntityTypeWorkflow:
eo.Workflow = any(o).(sdk.V2Workflow)
}

entities = append(entities, eo)
}
return entities, nil
}

// analyzeCommitSignatureThroughVcsAPI analyzes commit.
func (api *API) analyzeCommitSignatureThroughVcsAPI(ctx context.Context, analysis sdk.ProjectRepositoryAnalysis, vcsProject sdk.VCSProject, repoWithSecret sdk.ProjectRepository) (string, string, error) {
var keyID, analyzesError string
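Repository analysis goes through the generic `Lint` helper, which for a `V2WorkerModel` also checks the docker image against the whitelist. A stripped-down, self-contained sketch of that check; the types and values are simplified stand-ins, only the matching logic mirrors the PR:

```go
package main

import (
	"encoding/json"
	"fmt"
	"regexp"
)

// dockerSpec stands in for sdk.V2WorkerModelDockerSpec in this sketch.
type dockerSpec struct {
	Image string `json:"image"`
}

// imageAllowed mirrors the whitelist part of Lint: non-docker specs and empty
// whitelists are always accepted, otherwise at least one regexp must match.
func imageAllowed(rawSpec []byte, whiteList []*regexp.Regexp) bool {
	var spec dockerSpec
	if err := json.Unmarshal(rawSpec, &spec); err != nil {
		return true // not a docker spec, nothing to check
	}
	if spec.Image == "" || len(whiteList) == 0 {
		return true
	}
	for _, r := range whiteList {
		if r.MatchString(spec.Image) {
			return true
		}
	}
	return false
}

func main() {
	whiteList := []*regexp.Regexp{regexp.MustCompile(`^buildpack-deps:.*`)}
	fmt.Println(imageAllowed([]byte(`{"image":"buildpack-deps:jessie"}`), whiteList)) // true
	fmt.Println(imageAllowed([]byte(`{"image":"alpine:3.19"}`), whiteList))           // false
}
```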
28 changes: 28 additions & 0 deletions engine/api/worker_model.go
@@ -30,6 +30,20 @@ func (api *API) postWorkerModelHandler() service.Handler {
return err
}

// Verify the image if a whitelist is set up
if data.ModelDocker.Image != "" && len(api.WorkerModelDockerImageWhiteList) > 0 {
var allowedImage = false
for _, r := range api.WorkerModelDockerImageWhiteList { // At least one regexp must match
if r.MatchString(data.ModelDocker.Image) {
allowedImage = true
break
}
}
if !allowedImage {
return sdk.WithStack(sdk.ErrWrongRequest)
}
}

// check that given group id exits and that the user is admin of the group
grp, err := group.LoadByID(ctx, api.mustDB(), data.GroupID, group.LoadOptions.WithMembers)
if err != nil {
@@ -107,6 +121,20 @@ func (api *API) putWorkerModelHandler() service.Handler {
return err
}

// Verify the image if a whitelist is set up
if data.ModelDocker.Image != "" && len(api.WorkerModelDockerImageWhiteList) > 0 {
var allowedImage = false
for _, r := range api.WorkerModelDockerImageWhiteList { // At least one regexp must match
if r.MatchString(data.ModelDocker.Image) {
allowedImage = true
break
}
}
if !allowedImage {
return sdk.WithStack(sdk.ErrWrongRequest)
}
}

if old.GroupID != data.GroupID {
// check that given group id exits and that the user is admin of the group
grp, err := group.LoadByID(ctx, api.mustDB(), data.GroupID, group.LoadOptions.WithMembers)
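The same matching loop now appears in `Lint`, `postWorkerModelHandler` and `putWorkerModelHandler`. A possible follow-up, not part of this PR and with a made-up helper name, would be to share it through a single method on `*API`:

```go
// isWorkerModelImageAllowed reports whether a docker image passes the configured
// whitelist. An empty image or an empty whitelist always passes; otherwise at
// least one regexp must match. (Hypothetical helper, not in this PR.)
func (api *API) isWorkerModelImageAllowed(image string) bool {
	if image == "" || len(api.WorkerModelDockerImageWhiteList) == 0 {
		return true
	}
	for _, r := range api.WorkerModelDockerImageWhiteList {
		if r.MatchString(image) {
			return true
		}
	}
	return false
}
```

Each handler's new block would then shrink to a single `if !api.isWorkerModelImageAllowed(data.ModelDocker.Image)` check.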
36 changes: 36 additions & 0 deletions engine/api/worker_model_test.go
@@ -6,6 +6,7 @@ import (
"fmt"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"

@@ -93,6 +94,41 @@ func Test_postWorkerModelAsAdmin(t *testing.T) {
assert.Equal(t, "THIS IS A TEST", newModel.ModelDocker.Envs["CDS_TEST"], "Worker model envs are not good")
}

func Test_postWorkerModelAsAdminWithWhiteList(t *testing.T) {
Test_DeleteAllWorkerModels(t)

api, db, _ := newTestAPI(t)
r := regexp.MustCompile("notbuildpack.*")
api.WorkerModelDockerImageWhiteList = append(api.WorkerModelDockerImageWhiteList, *r)

_, jwtRaw := assets.InsertAdminUser(t, db)

groupShared, err := group.LoadByName(context.TODO(), api.mustDB(), sdk.SharedInfraGroupName)
require.NoError(t, err)

model := sdk.Model{
Name: "Test1",
GroupID: groupShared.ID,
Type: sdk.Docker,
ModelDocker: sdk.ModelDocker{
Image: "buildpack-deps:jessie",
Shell: "sh -c",
Cmd: "worker --api={{.API}}",
Envs: map[string]string{
"CDS_TEST": "THIS IS A TEST",
},
},
}

// Send POST model request
uri := api.Router.GetRoute("POST", api.postWorkerModelHandler, nil)
test.NotEmpty(t, uri)
req := assets.NewJWTAuthentifiedRequest(t, jwtRaw, "POST", uri, model)
w := httptest.NewRecorder()
api.Router.Mux.ServeHTTP(w, req)
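// The whitelist only allows images matching "notbuildpack.*", so "buildpack-deps:jessie" must be rejected.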
assert.Equal(t, 400, w.Code)
}

func Test_addWorkerModelWithPrivateRegistryAsAdmin(t *testing.T) {
api, db, _ := newTestAPI(t)

2 changes: 1 addition & 1 deletion engine/cache/redis.go
@@ -63,7 +63,7 @@ func NewRedisStore(host, password string, dbindex, ttl int) (*RedisStore, error)

pong, err := client.Ping().Result()
if err != nil {
return nil, sdk.WithStack(err)
return nil, sdk.WrapError(err, "unable to connect to redis %s:%d", host, dbindex)
}
if pong != "PONG" {
return nil, fmt.Errorf("cannot ping Redis on %s", host)
46 changes: 0 additions & 46 deletions sdk/entity.go
@@ -1,10 +1,7 @@
package sdk

import (
"regexp"
"time"

"github.com/rockbears/yaml"
)

const (
@@ -67,49 +64,6 @@ type Lintable interface {
GetName() string
}

func ReadEntityFile[T Lintable](directory, fileName string, content []byte, out *[]T, t string, analysis ProjectRepositoryAnalysis) ([]EntityWithObject, []error) {
namePattern, err := regexp.Compile(EntityNamePattern)
if err != nil {
return nil, []error{WrapError(err, "unable to compile regexp %s", namePattern)}
}

if err := yaml.UnmarshalMultipleDocuments(content, out); err != nil {
return nil, []error{NewErrorFrom(ErrInvalidData, "unable to read %s%s: %v", directory, fileName, err)}
}
var entities []EntityWithObject
for _, o := range *out {
if err := o.Lint(); err != nil {
return nil, err
}
eo := EntityWithObject{
Entity: Entity{
Data: string(content),
Name: o.GetName(),
Ref: analysis.Ref,
Commit: analysis.Commit,
ProjectKey: analysis.ProjectKey,
ProjectRepositoryID: analysis.ProjectRepositoryID,
Type: t,
FilePath: directory + fileName,
},
}
if !namePattern.MatchString(o.GetName()) {
return nil, []error{WrapError(ErrInvalidData, "name %s doesn't match %s", o.GetName(), EntityNamePattern)}
}
switch t {
case EntityTypeWorkerModel:
eo.Model = any(o).(V2WorkerModel)
case EntityTypeAction:
eo.Action = any(o).(V2Action)
case EntityTypeWorkflow:
eo.Workflow = any(o).(V2Workflow)
}

entities = append(entities, eo)
}
return entities, nil
}

type EntityCheckResponse struct {
Messages []string `json:"messages"`
}