Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add dataGovernanceType field to bigquery_routine resource #7149

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changelog/9859.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `data_governance_type` field to `google_bigquery_routine` resource
```
29 changes: 29 additions & 0 deletions google-beta/services/bigquery/resource_bigquery_routine.go
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,12 @@ the schema as returned by the API.`,
},
},
},
"data_governance_type": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: verify.ValidateEnum([]string{"DATA_MASKING", ""}),
Description: `If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask Possible values: ["DATA_MASKING"]`,
},
"description": {
Type: schema.TypeString,
Optional: true,
Expand Down Expand Up @@ -392,6 +398,12 @@ func resourceBigQueryRoutineCreate(d *schema.ResourceData, meta interface{}) err
} else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(determinismLevelProp)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) {
obj["determinismLevel"] = determinismLevelProp
}
dataGovernanceTypeProp, err := expandBigQueryRoutineDataGovernanceType(d.Get("data_governance_type"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("data_governance_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(dataGovernanceTypeProp)) && (ok || !reflect.DeepEqual(v, dataGovernanceTypeProp)) {
obj["dataGovernanceType"] = dataGovernanceTypeProp
}
sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config)
if err != nil {
return err
Expand Down Expand Up @@ -537,6 +549,9 @@ func resourceBigQueryRoutineRead(d *schema.ResourceData, meta interface{}) error
if err := d.Set("determinism_level", flattenBigQueryRoutineDeterminismLevel(res["determinismLevel"], d, config)); err != nil {
return fmt.Errorf("Error reading Routine: %s", err)
}
if err := d.Set("data_governance_type", flattenBigQueryRoutineDataGovernanceType(res["dataGovernanceType"], d, config)); err != nil {
return fmt.Errorf("Error reading Routine: %s", err)
}
if err := d.Set("spark_options", flattenBigQueryRoutineSparkOptions(res["sparkOptions"], d, config)); err != nil {
return fmt.Errorf("Error reading Routine: %s", err)
}
Expand Down Expand Up @@ -623,6 +638,12 @@ func resourceBigQueryRoutineUpdate(d *schema.ResourceData, meta interface{}) err
} else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) {
obj["determinismLevel"] = determinismLevelProp
}
dataGovernanceTypeProp, err := expandBigQueryRoutineDataGovernanceType(d.Get("data_governance_type"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("data_governance_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, dataGovernanceTypeProp)) {
obj["dataGovernanceType"] = dataGovernanceTypeProp
}
sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config)
if err != nil {
return err
Expand Down Expand Up @@ -882,6 +903,10 @@ func flattenBigQueryRoutineDeterminismLevel(v interface{}, d *schema.ResourceDat
return v
}

// flattenBigQueryRoutineDataGovernanceType maps the API's `dataGovernanceType`
// field into Terraform state. The enum string (e.g. "DATA_MASKING") is stored
// verbatim, so the value is passed through unchanged.
func flattenBigQueryRoutineDataGovernanceType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
	return v
}

func flattenBigQueryRoutineSparkOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
if v == nil {
return nil
Expand Down Expand Up @@ -1114,6 +1139,10 @@ func expandBigQueryRoutineDeterminismLevel(v interface{}, d tpgresource.Terrafor
return v, nil
}

// expandBigQueryRoutineDataGovernanceType converts the configured
// `data_governance_type` attribute into the API's `dataGovernanceType` field.
// The enum string is sent verbatim, so the value is passed through unchanged;
// validation happens at plan time via the schema's ValidateFunc.
func expandBigQueryRoutineDataGovernanceType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
	return v, nil
}

func expandBigQueryRoutineSparkOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 || l[0] == nil {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,53 @@ resource "google_bigquery_routine" "spark_jar" {
`, context)
}

// TestAccBigQueryRoutine_bigqueryRoutineDataGovernanceTypeExample verifies that
// a SQL routine with data_governance_type = "DATA_MASKING" can be created and
// then imported with no state diff.
func TestAccBigQueryRoutine_bigqueryRoutineDataGovernanceTypeExample(t *testing.T) {
	t.Parallel()

	// Randomize the dataset ID so parallel test runs do not collide.
	vars := map[string]interface{}{
		"random_suffix": acctest.RandString(t, 10),
	}

	steps := []resource.TestStep{
		// Apply the example configuration containing the masking routine.
		{
			Config: testAccBigQueryRoutine_bigqueryRoutineDataGovernanceTypeExample(vars),
		},
		// Import the routine back and confirm state round-trips cleanly.
		{
			ResourceName:      "google_bigquery_routine.custom_masking_routine",
			ImportState:       true,
			ImportStateVerify: true,
		},
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryRoutineDestroyProducer(t),
		Steps:                    steps,
	})
}

// testAccBigQueryRoutine_bigqueryRoutineDataGovernanceTypeExample renders the
// HCL configuration for the data-governance-type example: a dataset plus a
// SQL scalar function marked DATA_MASKING that redacts digits from an SSN.
// Nprintf substitutes %{random_suffix} from the context map.
func testAccBigQueryRoutine_bigqueryRoutineDataGovernanceTypeExample(context map[string]interface{}) string {
	return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id = "tf_test_dataset_id%{random_suffix}"
}

resource "google_bigquery_routine" "custom_masking_routine" {
  dataset_id = google_bigquery_dataset.test.dataset_id
  routine_id = "custom_masking_routine"
  routine_type = "SCALAR_FUNCTION"
  language = "SQL"
  data_governance_type = "DATA_MASKING"
  definition_body = "SAFE.REGEXP_REPLACE(ssn, '[0-9]', 'X')"
  arguments {
    name = "ssn"
    data_type = "{\"typeKind\" : \"STRING\"}"
  }
  return_type = "{\"typeKind\" : \"STRING\"}"
}

`, context)
}

func testAccCheckBigQueryRoutineDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
Expand Down
33 changes: 33 additions & 0 deletions website/docs/r/bigquery_routine.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,34 @@ resource "google_bigquery_routine" "spark_jar" {
}
}
}
```
<div class = "oics-button" style="float: right; margin: 0 0 -15px">
<a href="https://console.cloud.google.com/cloudshell/open?cloudshell_git_repo=https%3A%2F%2Fgithub.com%2Fterraform-google-modules%2Fdocs-examples.git&cloudshell_working_dir=bigquery_routine_data_governance_type&cloudshell_image=gcr.io%2Fcloudshell-images%2Fcloudshell%3Alatest&open_in_editor=main.tf&cloudshell_print=.%2Fmotd&cloudshell_tutorial=.%2Ftutorial.md" target="_blank">
<img alt="Open in Cloud Shell" src="//gstatic.com/cloudssh/images/open-btn.svg" style="max-height: 44px; margin: 32px auto; max-width: 100%;">
</a>
</div>
## Example Usage - Bigquery Routine Data Governance Type


```hcl
resource "google_bigquery_dataset" "test" {
  dataset_id = "dataset_id"
}

resource "google_bigquery_routine" "custom_masking_routine" {
dataset_id = google_bigquery_dataset.test.dataset_id
routine_id = "custom_masking_routine"
routine_type = "SCALAR_FUNCTION"
language = "SQL"
data_governance_type = "DATA_MASKING"
definition_body = "SAFE.REGEXP_REPLACE(ssn, '[0-9]', 'X')"
arguments {
name = "ssn"
data_type = "{\"typeKind\" : \"STRING\"}"
}
return_type = "{\"typeKind\" : \"STRING\"}"
}

```
## Example Usage - Bigquery Routine Remote Function

Expand Down Expand Up @@ -338,6 +366,11 @@ The following arguments are supported:
The determinism level of the JavaScript UDF if defined.
Possible values are: `DETERMINISM_LEVEL_UNSPECIFIED`, `DETERMINISTIC`, `NOT_DETERMINISTIC`.

* `data_governance_type` -
(Optional)
If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask
Possible values are: `DATA_MASKING`.

* `spark_options` -
(Optional)
Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure.
Expand Down