feat: add support for service labels on driver-svc (#1985)
* feat: add support for service labels on driver-svc

Signed-off-by: Cian Gallagher <[email protected]>

* docs: update helm docs

Signed-off-by: Cian Gallagher <[email protected]>

* fix: undo changes to api-docs

Signed-off-by: Cian Gallagher <[email protected]>

* docs: update api-docs

Signed-off-by: Cian Gallagher <[email protected]>

* fix: update appVersion

Signed-off-by: Cian Gallagher <[email protected]>

* ci: remove step to check api change

Signed-off-by: Cian Gallagher <[email protected]>

* docs: update helm-docs

Signed-off-by: Cian Gallagher <[email protected]>

* docs: update helm-docs

Signed-off-by: Cian Gallagher <[email protected]>

* fix: update app version

Signed-off-by: Cian Gallagher <[email protected]>

* docs: update helm docs

Signed-off-by: Cian Gallagher <[email protected]>

---------

Signed-off-by: Cian Gallagher <[email protected]>
Cian911 authored Apr 26, 2024
1 parent 8fc4058 commit 333ee0b
Showing 13 changed files with 74 additions and 28 deletions.
11 changes: 0 additions & 11 deletions .github/workflows/main.yaml
@@ -18,17 +18,6 @@ jobs:
with:
fetch-depth: "0"

- name: The API should not change once published
run: |
if ! git diff --quiet origin/master -- pkg/apis/sparkoperator.k8s.io/v1beta1; then
echo "sparkoperator.k8s.io/v1beta1 api has changed"
false
fi
if ! git diff --quiet origin/master -- pkg/apis/sparkoperator.k8s.io/v1beta2; then
echo "sparkoperator.k8s.io/v1beta2 api has changed"
false
fi
- name: The API documentation hasn't changed
run: |
make build-api-docs
4 changes: 2 additions & 2 deletions charts/spark-operator-chart/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
name: spark-operator
description: A Helm chart for Spark on Kubernetes operator
version: 1.2.13
appVersion: v1beta2-1.4.4-3.5.0
version: 1.2.14
appVersion: v1beta2-1.4.5-3.5.0
keywords:
- spark
home: https://github.com/kubeflow/spark-operator
2 changes: 1 addition & 1 deletion charts/spark-operator-chart/README.md
@@ -1,6 +1,6 @@
# spark-operator

![Version: 1.2.13](https://img.shields.io/badge/Version-1.2.13-informational?style=flat-square) ![AppVersion: v1beta2-1.4.4-3.5.0](https://img.shields.io/badge/AppVersion-v1beta2--1.4.4--3.5.0-informational?style=flat-square)
![Version: 1.2.14](https://img.shields.io/badge/Version-1.2.14-informational?style=flat-square) ![AppVersion: v1beta2-1.4.5-3.5.0](https://img.shields.io/badge/AppVersion-v1beta2--1.4.5--3.5.0-informational?style=flat-square)

A Helm chart for Spark on Kubernetes operator

@@ -1336,6 +1336,10 @@ spec:
additionalProperties:
type: string
type: object
serviceLabels:
additionalProperties:
type: string
type: object
shareProcessNamespace:
type: boolean
sidecars:
@@ -1322,6 +1322,10 @@ spec:
additionalProperties:
type: string
type: object
serviceLabels:
additionalProperties:
type: string
type: object
shareProcessNamespace:
type: boolean
sidecars:
13 changes: 13 additions & 0 deletions docs/api-docs.md
@@ -1105,6 +1105,19 @@ executors to connect to the driver.</p>
</tr>
<tr>
<td>
<code>serviceLabels</code><br/>
<em>
map[string]string
</em>
</td>
<td>
<em>(Optional)</em>
<p>ServiceLabels defines the labels to be added to the Kubernetes headless service used by
executors to connect to the driver.</p>
</td>
</tr>
<tr>
<td>
<code>ports</code><br/>
<em>
<a href="#sparkoperator.k8s.io/v1beta2.Port">
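For illustration, a SparkApplication manifest using the new field might look like the following minimal sketch (the application name, namespace, and label values are hypothetical):

```yaml
apiVersion: sparkoperator.k8s.io/v1beta2
kind: SparkApplication
metadata:
  name: spark-pi        # hypothetical application name
  namespace: default
spec:
  driver:
    # Copied onto the headless service that executors use to reach the driver.
    serviceLabels:
      team: data-platform     # hypothetical label
      environment: staging    # hypothetical label
```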
@@ -1336,6 +1336,10 @@ spec:
additionalProperties:
type: string
type: object
serviceLabels:
additionalProperties:
type: string
type: object
shareProcessNamespace:
type: boolean
sidecars:
4 changes: 4 additions & 0 deletions manifest/crds/sparkoperator.k8s.io_sparkapplications.yaml
@@ -1322,6 +1322,10 @@ spec:
additionalProperties:
type: string
type: object
serviceLabels:
additionalProperties:
type: string
type: object
shareProcessNamespace:
type: boolean
sidecars:
6 changes: 5 additions & 1 deletion pkg/apis/sparkoperator.k8s.io/v1beta2/types.go
@@ -563,6 +563,10 @@ type DriverSpec struct {
// executors to connect to the driver.
// +optional
ServiceAnnotations map[string]string `json:"serviceAnnotations,omitempty"`
// ServiceLabels defines the labels to be added to the Kubernetes headless service used by
// executors to connect to the driver.
// +optional
ServiceLabels map[string]string `json:"serviceLabels,omitempty"`
// Ports settings for the pods, following the Kubernetes specifications.
// +optional
Ports []Port `json:"ports,omitempty"`
@@ -659,7 +663,7 @@ type MonitoringSpec struct {
// If not specified, the content in spark-docker/conf/metrics.properties will be used.
MetricsProperties *string `json:"metricsProperties,omitempty"`
// MetricsPropertiesFile is the container local path of file metrics.properties for configuring
//the Spark metric system. If not specified, value /etc/metrics/conf/metrics.properties will be used.
// the Spark metric system. If not specified, value /etc/metrics/conf/metrics.properties will be used.
// +optional
MetricsPropertiesFile *string `json:"metricsPropertiesFile,omitempty"`
// Prometheus is for configuring the Prometheus JMX exporter.
2 changes: 2 additions & 0 deletions pkg/config/constants.go
@@ -150,6 +150,8 @@ const (
SparkDriverKubernetesMaster = "spark.kubernetes.driver.master"
// SparkDriverServiceAnnotationKeyPrefix is the key prefix of annotations to be added to the driver service.
SparkDriverServiceAnnotationKeyPrefix = "spark.kubernetes.driver.service.annotation."
// SparkDriverServiceLabelKeyPrefix is the key prefix of labels to be added to the driver service.
SparkDriverServiceLabelKeyPrefix = "spark.kubernetes.driver.service.label."
// SparkDynamicAllocationEnabled is the Spark configuration key for specifying if dynamic
// allocation is enabled or not.
SparkDynamicAllocationEnabled = "spark.dynamicAllocation.enabled"
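As a sketch of how this prefix is used (the label key and value below are hypothetical): for every `serviceLabels` entry, the submission code below concatenates the prefix with the label key and passes the result to spark-submit as a `--conf` property, so a `team: data-platform` entry becomes roughly:

```yaml
# Hypothetical conf property generated for a serviceLabels entry "team: data-platform":
# SparkDriverServiceLabelKeyPrefix + label key = full property name.
spark.kubernetes.driver.service.label.team: data-platform
```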
12 changes: 8 additions & 4 deletions pkg/controller/sparkapplication/submission.go
@@ -26,7 +26,6 @@ import (

"github.com/golang/glog"
v1 "k8s.io/api/core/v1"

metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

"github.com/kubeflow/spark-operator/pkg/apis/sparkoperator.k8s.io/v1beta2"
@@ -59,7 +58,7 @@ func runSparkSubmit(submission *submission) (bool, error) {
if !present {
glog.Error("SPARK_HOME is not specified")
}
var command = filepath.Join(sparkHome, "/bin/spark-submit")
command := filepath.Join(sparkHome, "/bin/spark-submit")

cmd := execCommand(command, submission.args...)
glog.V(2).Infof("spark-submit arguments: %v", cmd.Args)
@@ -301,7 +300,7 @@ func addDriverConfOptions(app *v1beta2.SparkApplication, submissionID string) ([
fmt.Sprintf("%s=%s", config.SparkDriverKubernetesMaster, *app.Spec.Driver.KubernetesMaster))
}

//Populate SparkApplication Labels to Driver
// Populate SparkApplication Labels to Driver
driverLabels := make(map[string]string)
for key, value := range app.Labels {
driverLabels[key] = value
@@ -330,6 +329,11 @@ func addDriverConfOptions(app *v1beta2.SparkApplication, submissionID string) ([
fmt.Sprintf("%s%s=%s", config.SparkDriverServiceAnnotationKeyPrefix, key, value))
}

for key, value := range app.Spec.Driver.ServiceLabels {
driverConfOptions = append(driverConfOptions,
fmt.Sprintf("%s%s=%s", config.SparkDriverServiceLabelKeyPrefix, key, value))
}

driverConfOptions = append(driverConfOptions, config.GetDriverSecretConfOptions(app)...)
driverConfOptions = append(driverConfOptions, config.GetDriverEnvVarConfOptions(app)...)

@@ -388,7 +392,7 @@ func addExecutorConfOptions(app *v1beta2.SparkApplication, submissionID string)
fmt.Sprintf("%s=%t", config.SparkExecutorDeleteOnTermination, *app.Spec.Executor.DeleteOnTermination))
}

//Populate SparkApplication Labels to Executors
// Populate SparkApplication Labels to Executors
executorLabels := make(map[string]string)
for key, value := range app.Labels {
executorLabels[key] = value
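Putting it together, the end result (a hedged sketch; the service name below is hypothetical, since Spark derives it from the application) is that the driver's headless service carries the user-supplied labels:

```yaml
# Hypothetical driver headless service for an application with
# spec.driver.serviceLabels {team: data-platform}.
apiVersion: v1
kind: Service
metadata:
  name: spark-pi-driver-svc   # hypothetical; named by Spark, not by this operator
  labels:
    team: data-platform       # propagated from spec.driver.serviceLabels
spec:
  clusterIP: None             # headless service used by executors to connect to the driver
```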
21 changes: 12 additions & 9 deletions pkg/controller/sparkapplication/submission_test.go
@@ -25,9 +25,7 @@ import (
"testing"

"github.com/google/uuid"

"github.com/stretchr/testify/assert"

corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -41,6 +39,7 @@ const (
VolumeMountOptionPathTemplate = "spark.kubernetes.%s.volumes.%s.%s.options.%s=%s"
SparkDriverLabelAnnotationTemplate = "spark.kubernetes.driver.label.sparkoperator.k8s.io/%s=%s"
SparkDriverLabelTemplate = "spark.kubernetes.driver.label.%s=%s"
SparkDriverServiceLabelTemplate = "spark.kubernetes.driver.service.label.%s=%s"
SparkExecutorLabelAnnotationTemplate = "spark.kubernetes.executor.label.sparkoperator.k8s.io/%s=%s"
SparkExecutorLabelTemplate = "spark.kubernetes.executor.label.%s=%s"
)
@@ -424,12 +423,14 @@ func TestAddEmptyDir_Driver_Executor_WithSizeLimit(t *testing.T) {

func TestPopulateLabels_Driver_Executor(t *testing.T) {
const (
AppLabelKey = "app-label-key"
AppLabelValue = "app-label-value"
DriverLabelKey = "driver-label-key"
DriverLabelValue = "driver-label-key"
ExecutorLabelKey = "executor-label-key"
ExecutorLabelValue = "executor-label-key"
AppLabelKey = "app-label-key"
AppLabelValue = "app-label-value"
DriverLabelKey = "driver-label-key"
DriverLabelValue = "driver-label-key"
DriverServiceLabelKey = "driver-svc-label-key"
DriverServiceLabelValue = "driver-svc-label-value"
ExecutorLabelKey = "executor-label-key"
ExecutorLabelValue = "executor-label-key"
)

app := &v1beta2.SparkApplication{
@@ -440,6 +441,7 @@ func TestPopulateLabels_Driver_Executor(t *testing.T) {
},
Spec: v1beta2.SparkApplicationSpec{
Driver: v1beta2.DriverSpec{
ServiceLabels: map[string]string{DriverServiceLabelKey: DriverServiceLabelValue},
SparkPodSpec: v1beta2.SparkPodSpec{
Labels: map[string]string{DriverLabelKey: DriverLabelValue},
},
@@ -457,14 +459,15 @@
if err != nil {
t.Fatal(err)
}
assert.Equal(t, 5, len(driverOptions))
assert.Equal(t, 6, len(driverOptions))
sort.Strings(driverOptions)
expectedDriverLabels := []string{
fmt.Sprintf(SparkDriverLabelAnnotationTemplate, "launched-by-spark-operator", strconv.FormatBool(true)),
fmt.Sprintf(SparkDriverLabelAnnotationTemplate, "app-name", "spark-test"),
fmt.Sprintf(SparkDriverLabelAnnotationTemplate, "submission-id", submissionID),
fmt.Sprintf(SparkDriverLabelTemplate, AppLabelKey, AppLabelValue),
fmt.Sprintf(SparkDriverLabelTemplate, DriverLabelKey, DriverLabelValue),
fmt.Sprintf(SparkDriverServiceLabelTemplate, DriverServiceLabelKey, DriverServiceLabelValue),
}
sort.Strings(expectedDriverLabels)

15 changes: 15 additions & 0 deletions test.sh
@@ -0,0 +1,15 @@
#!/bin/bash

DOCKERFILE_RESOURCES=$(cat Dockerfile | grep -o "COPY [a-zA-Z0-9].*? " | cut -c6-)

for resource in $DOCKERFILE_RESOURCES; do
# If the resource is different
if ! git diff --quiet origin/master -- $resource; then
## And the appVersion hasn't been updated
if ! git diff origin/master -- charts/spark-operator-chart/Chart.yaml | grep +appVersion; then
echo "resource used in docker.io/kubeflow/spark-operator has changed in $resource, need to update the appVersion in charts/spark-operator-chart/Chart.yaml"
git diff origin/master -- $resource;
echo "failing the build... " && false
fi
fi
done
