This repository has been archived by the owner on Oct 9, 2023. It is now read-only.

Commit: Add non-interruptible node selector requirement to spark driver if set (#346)

* Add non-interruptible node selector requirement to spark driver if set

Signed-off-by: Jeev B <[email protected]>

* comment

Signed-off-by: Jeev B <[email protected]>

---------

Signed-off-by: Jeev B <[email protected]>
jeevb authored Apr 25, 2023
1 parent 01f2126 commit fcbbd23
Showing 2 changed files with 25 additions and 2 deletions.
5 changes: 5 additions & 0 deletions flyteplugins/go/tasks/plugins/k8s/spark/spark.go
@@ -236,6 +236,11 @@ func (sparkResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsCo
j.Spec.MainClass = &sparkJob.MainClass
}

// Spark driver pods should always run as non-interruptible. As such, we hardcode
// `interruptible=false` to explicitly add non-interruptible node selector
// requirements to the driver pods
flytek8s.ApplyInterruptibleNodeSelectorRequirement(false, j.Spec.Driver.Affinity)

// Add Interruptible Tolerations/NodeSelector to only Executor pods.
// The Interruptible NodeSelector takes precedence over the DefaultNodeSelector
if taskCtx.TaskExecutionMetadata().IsInterruptible() {
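For context, here is a minimal sketch of the kind of behavior a helper like flytek8s.ApplyInterruptibleNodeSelectorRequirement would need for the hardcoded interruptible=false call above to have the intended effect: pick the configured non-interruptible NodeSelectorRequirement and append it to the required node affinity terms of the driver pod. The helper name and the call are taken from the diff; everything else below (package name, parameters, where the requirements come from) is an assumption for illustration, not the actual flytek8s implementation.

// Illustrative sketch only: approximates the assumed behavior of applying an
// interruptible/non-interruptible node selector requirement to a pod affinity.
// It is NOT the actual flytek8s implementation.
package sparksketch

import (
	v1 "k8s.io/api/core/v1"
)

// applyNodeSelectorRequirement picks the requirement matching the interruptible
// flag and appends it to every required node selector term of the affinity.
func applyNodeSelectorRequirement(
	interruptible bool,
	affinity *v1.Affinity,
	interruptibleReq, nonInterruptibleReq *v1.NodeSelectorRequirement, // assumed to come from plugin config
) {
	req := nonInterruptibleReq
	if interruptible {
		req = interruptibleReq
	}
	if affinity == nil || req == nil {
		return
	}
	if affinity.NodeAffinity == nil {
		affinity.NodeAffinity = &v1.NodeAffinity{}
	}
	if affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution == nil {
		affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution = &v1.NodeSelector{
			NodeSelectorTerms: []v1.NodeSelectorTerm{{}},
		}
	}
	terms := affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms
	for i := range terms {
		terms[i].MatchExpressions = append(terms[i].MatchExpressions, *req)
	}
}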
22 changes: 20 additions & 2 deletions flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go
@@ -585,8 +585,17 @@ func TestBuildResourceSpark(t *testing.T) {
assert.Equal(t, sparkApp.Spec.Executor.EnvVars["foo"], defaultEnvVars["foo"])
assert.Equal(t, sparkApp.Spec.Driver.EnvVars["fooEnv"], targetValueFromEnv)
assert.Equal(t, sparkApp.Spec.Executor.EnvVars["fooEnv"], targetValueFromEnv)
assert.Equal(t, sparkApp.Spec.Driver.Affinity, defaultAffinity)

assert.Equal(
t,
sparkApp.Spec.Driver.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
defaultAffinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
)
assert.Equal(
t,
sparkApp.Spec.Driver.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[1],
*nonInterruptibleNodeSelectorRequirement,
)
assert.Equal(
t,
sparkApp.Spec.Executor.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
@@ -639,7 +648,16 @@ func TestBuildResourceSpark(t *testing.T) {
assert.Equal(t, sparkApp.Spec.Driver.Tolerations[0].Value, "default")

// Validate correct affinity and nodeselector requirements are set for both Driver and Executors.
assert.Equal(t, sparkApp.Spec.Driver.Affinity, defaultAffinity)
assert.Equal(
t,
sparkApp.Spec.Driver.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
defaultAffinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
)
assert.Equal(
t,
sparkApp.Spec.Driver.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[1],
*nonInterruptibleNodeSelectorRequirement,
)
assert.Equal(
t,
sparkApp.Spec.Executor.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0],
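For reference, a hedged sketch of test fixtures that would satisfy the assertions above: a defaultAffinity whose first match expression stays at index 0 of the driver's node selector term, and a nonInterruptibleNodeSelectorRequirement that the driver affinity is expected to gain at index 1. The fixture names come from the test diff; the keys, operators, and values below are illustrative assumptions, not the actual values in spark_test.go.

// Illustrative fixture values only (assumed, not the real test setup).
package sparksketch

import (
	v1 "k8s.io/api/core/v1"
)

var (
	// defaultAffinity: a required node affinity with a single match expression,
	// expected to remain the first entry on the driver pod.
	defaultAffinity = &v1.Affinity{
		NodeAffinity: &v1.NodeAffinity{
			RequiredDuringSchedulingIgnoredDuringExecution: &v1.NodeSelector{
				NodeSelectorTerms: []v1.NodeSelectorTerm{
					{
						MatchExpressions: []v1.NodeSelectorRequirement{
							{
								Key:      "node-group", // illustrative key
								Operator: v1.NodeSelectorOpIn,
								Values:   []string{"spark"},
							},
						},
					},
				},
			},
		},
	}

	// nonInterruptibleNodeSelectorRequirement: the requirement the plugin is
	// expected to append to the driver affinity when interruptible=false.
	nonInterruptibleNodeSelectorRequirement = &v1.NodeSelectorRequirement{
		Key:      "interruptible", // illustrative key
		Operator: v1.NodeSelectorOpDoesNotExist,
	}
)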
