Skip to content

Commit

Permalink
added triggerId to jobTrigger resource (#8091) (#5773)
Browse files Browse the repository at this point in the history
Signed-off-by: Modular Magician <[email protected]>
  • Loading branch information
modular-magician authored Jun 13, 2023
1 parent 7d9bfb3 commit 6fc4ff7
Show file tree
Hide file tree
Showing 4 changed files with 162 additions and 11 deletions.
3 changes: 3 additions & 0 deletions .changelog/8091.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
dlp: added `trigger_id` field to `google_data_loss_prevention_job_trigger`
```
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBasicExample(t *testing.T)
ResourceName: "google_data_loss_prevention_job_trigger.basic",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -113,7 +113,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitExample(t
ResourceName: "google_data_loss_prevention_job_trigger.bigquery_row_limit",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -181,7 +181,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitPercentage
ResourceName: "google_data_loss_prevention_job_trigger.bigquery_row_limit_percentage",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -249,7 +249,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(t
ResourceName: "google_data_loss_prevention_job_trigger.data_catalog_output",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -310,7 +310,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(t *testin
ResourceName: "google_data_loss_prevention_job_trigger.scc_output",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -371,7 +371,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerJobNotificationEmailsExamp
ResourceName: "google_data_loss_prevention_job_trigger.job_notification_emails",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -428,7 +428,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDeidentifyExample(t *testi
ResourceName: "google_data_loss_prevention_job_trigger.deidentify",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -542,7 +542,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerHybridExample(t *testing.T
ResourceName: "google_data_loss_prevention_job_trigger.hybrid_trigger",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -610,7 +610,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerInspectExample(t *testing.
ResourceName: "google_data_loss_prevention_job_trigger.inspect",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -729,7 +729,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPublishToStackdriverExampl
ResourceName: "google_data_loss_prevention_job_trigger.publish_to_stackdriver",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
},
},
})
Expand Down Expand Up @@ -765,6 +765,70 @@ resource "google_data_loss_prevention_job_trigger" "publish_to_stackdriver" {
`, context)
}

// TestAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample checks that a
// job trigger configured with an explicit trigger_id applies cleanly and can be
// imported back into state.
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(t *testing.T) {
	t.Parallel()

	testContext := map[string]interface{}{
		"project":       acctest.GetTestProjectFromEnv(),
		"random_suffix": RandString(t, 10),
	}

	steps := []resource.TestStep{
		{
			Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(testContext),
		},
		{
			ResourceName:      "google_data_loss_prevention_job_trigger.with_trigger_id",
			ImportState:       true,
			ImportStateVerify: true,
			// trigger_id and parent may differ between the configured values and
			// what an import read produces, so skip verifying them.
			ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
		},
	}

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
		Steps:                    steps,
	})
}

// testAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample renders the HCL
// for a job trigger that sets an explicit trigger_id. The %{project} and
// %{random_suffix} placeholders in the template are substituted from the
// context map by tpgresource.Nprintf.
func testAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(context map[string]interface{}) string {
return tpgresource.Nprintf(`
resource "google_data_loss_prevention_job_trigger" "with_trigger_id" {
parent = "projects/%{project}"
description = "Starting description"
display_name = "display"
trigger_id = "tf-test-id-%{random_suffix}"
triggers {
schedule {
recurrence_period_duration = "86400s"
}
}
inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset123"
}
}
}
}
storage_config {
cloud_storage_options {
file_set {
url = "gs://mybucket/directory/"
}
}
}
}
}
`, context)
}

func testAccCheckDataLossPreventionJobTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1374,6 +1374,15 @@ at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built
Description: `Whether the trigger is currently active. Default value: "HEALTHY" Possible values: ["PAUSED", "HEALTHY", "CANCELLED"]`,
Default: "HEALTHY",
},
"trigger_id": {
Type: schema.TypeString,
Computed: true,
Optional: true,
ForceNew: true,
Description: `The trigger id can contain uppercase and lowercase letters, numbers, and hyphens;
that is, it must match the regular expression: [a-zA-Z\d-_]+.
The maximum length is 100 characters. Can be empty to allow the system to generate one.`,
},
"create_time": {
Type: schema.TypeString,
Computed: true,
Expand Down Expand Up @@ -1514,6 +1523,18 @@ func resourceDataLossPreventionJobTriggerRead(d *schema.ResourceData, meta inter
return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("DataLossPreventionJobTrigger %q", d.Id()))
}

res, err = resourceDataLossPreventionJobTriggerDecoder(d, meta, res)
if err != nil {
return err
}

if res == nil {
// Decoding the object has resulted in it being gone. It may be marked deleted
log.Printf("[DEBUG] Removing DataLossPreventionJobTrigger because it no longer exists.")
d.SetId("")
return nil
}

if err := d.Set("name", flattenDataLossPreventionJobTriggerName(res["name"], d, config)); err != nil {
return fmt.Errorf("Error reading JobTrigger: %s", err)
}
Expand Down Expand Up @@ -1586,7 +1607,7 @@ func resourceDataLossPreventionJobTriggerUpdate(d *schema.ResourceData, meta int
obj["inspectJob"] = inspectJobProp
}

obj, err = resourceDataLossPreventionJobTriggerEncoder(d, meta, obj)
obj, err = resourceDataLossPreventionJobTriggerUpdateEncoder(d, meta, obj)
if err != nil {
return err
}
Expand Down Expand Up @@ -5761,7 +5782,26 @@ func expandDataLossPreventionJobTriggerInspectJobActionsPublishToStackdriver(v i
}

// resourceDataLossPreventionJobTriggerEncoder shapes the create request for the
// DLP API: the trigger body is nested under a "jobTrigger" key, and the
// user-chosen id, when set, is sent alongside it as "triggerId".
func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
	newObj := make(map[string]interface{})
	newObj["jobTrigger"] = obj
	// GetOk already reports false for unset/zero values, so the previous
	// extra nil check on the returned value was redundant.
	if triggerIdProp, ok := d.GetOk("trigger_id"); ok {
		newObj["triggerId"] = triggerIdProp
	}
	return newObj, nil
}

// resourceDataLossPreventionJobTriggerUpdateEncoder nests the trigger body under
// the "jobTrigger" key expected by update requests. Unlike the create encoder it
// never sends a triggerId: the field is ForceNew in the schema, so it cannot
// change on update.
func resourceDataLossPreventionJobTriggerUpdateEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
	return map[string]interface{}{"jobTrigger": obj}, nil
}

// resourceDataLossPreventionJobTriggerDecoder runs after every read and derives
// trigger_id from the API-returned resource name (reusing the name flattener),
// so that imported resources get trigger_id populated in state.
func resourceDataLossPreventionJobTriggerDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) {
	config := meta.(*transport_tpg.Config)
	flattenedID := flattenDataLossPreventionJobTriggerName(res["name"], d, config)
	if err := d.Set("trigger_id", flattenedID); err != nil {
		return nil, fmt.Errorf("Error reading JobTrigger: %s", err)
	}
	return res, nil
}
44 changes: 44 additions & 0 deletions website/docs/r/data_loss_prevention_job_trigger.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -432,6 +432,44 @@ resource "google_data_loss_prevention_job_trigger" "publish_to_stackdriver" {
}
}
```
## Example Usage - Dlp Job Trigger With Id


```hcl
resource "google_data_loss_prevention_job_trigger" "with_trigger_id" {
parent = "projects/my-project-name"
description = "Starting description"
display_name = "display"
trigger_id = "id-"
triggers {
schedule {
recurrence_period_duration = "86400s"
}
}
inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset123"
}
}
}
}
storage_config {
cloud_storage_options {
file_set {
url = "gs://mybucket/directory/"
}
}
}
}
}
```

## Argument Reference

Expand Down Expand Up @@ -481,6 +519,12 @@ The following arguments are supported:
(Optional)
User set display name of the job trigger.

* `trigger_id` -
(Optional)
The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores;
that is, it must match the regular expression: `[a-zA-Z\d-_]+`.
The maximum length is 100 characters. Can be empty to allow the system to generate one.

* `status` -
(Optional)
Whether the trigger is currently active.
Expand Down

0 comments on commit 6fc4ff7

Please sign in to comment.