Add support for BQ priority and execution project #164

Merged · 1 commit · Jul 31, 2023
2 changes: 2 additions & 0 deletions docs/data-sources/bigquery_connection.md
@@ -29,6 +29,7 @@ description: |-
- `client_x509_cert_url` (String) Client X509 Cert URL for the Service Account
- `dataproc_cluster_name` (String) Dataproc cluster name for PySpark workloads
- `dataproc_region` (String) Google Cloud region for PySpark workloads on Dataproc
- `execution_project` (String) Project to bill for query execution
- `gcp_project_id` (String) GCP project ID
- `gcs_bucket` (String) URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- `id` (String) The ID of this resource.
@@ -37,6 +38,7 @@ description: |-
- `location` (String) Location to create new Datasets in
- `maximum_bytes_billed` (Number) Max number of bytes that can be billed for a given BigQuery query
- `name` (String) Connection name
- `priority` (String) The priority with which to execute BigQuery queries
- `private_key` (String) Private key of the Service Account
- `private_key_id` (String) Private key ID of the Service Account
- `retries` (Number) Number of retries for queries
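The data source simply exposes the two new fields as read-only attributes. A minimal usage sketch, assuming the data source's usual `project_id`/`connection_id` lookup arguments (the IDs below are placeholders, not taken from this PR):

```hcl
# Hypothetical lookup of an existing BigQuery connection; IDs are placeholders.
data "dbtcloud_bigquery_connection" "example" {
  project_id    = 123
  connection_id = 456
}

# Surface the two fields added in this PR.
output "bq_execution_project" {
  value = data.dbtcloud_bigquery_connection.example.execution_project
}

output "bq_priority" {
  value = data.dbtcloud_bigquery_connection.example.priority
}
```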
2 changes: 2 additions & 0 deletions docs/resources/bigquery_connection.md
@@ -81,10 +81,12 @@ resource "dbtcloud_bigquery_connection" "test_connection_with_oauth" {
- `application_secret` (String, Sensitive) The Application Secret for BQ OAuth
- `dataproc_cluster_name` (String) Dataproc cluster name for PySpark workloads
- `dataproc_region` (String) Google Cloud region for PySpark workloads on Dataproc
- `execution_project` (String) Project to bill for query execution
- `gcs_bucket` (String) URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- `is_active` (Boolean) Whether the connection is active
- `location` (String) Location to create new Datasets in
- `maximum_bytes_billed` (Number) Max number of bytes that can be billed for a given BigQuery query
- `priority` (String) The priority with which to execute BigQuery queries (batch or interactive)
- `retries` (Number) Number of retries for queries

### Read-Only
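For the resource, a sketch of how the two new optional arguments could be set alongside the existing ones. Attribute names follow the documentation above and the acceptance tests later in this PR; every value is a placeholder, not a working credential:

```hcl
resource "dbtcloud_bigquery_connection" "example" {
  project_id                  = dbtcloud_project.example.id
  name                        = "My BigQuery connection"
  type                        = "bigquery"
  gcp_project_id              = "my-gcp-project"
  timeout_seconds             = 300
  private_key_id              = "my-private-key-id"
  private_key                 = "my-private-key"
  client_email                = "my-client-email"
  client_id                   = "my-client-id"
  auth_uri                    = "my-auth-uri"
  token_uri                   = "my-token-uri"
  auth_provider_x509_cert_url = "my-auth-provider-x509-cert-url"
  client_x509_cert_url        = "my-client-x509-cert-url"
  retries                     = 3

  # New in this PR:
  execution_project = "my-billing-project" # project billed for query execution
  priority          = "interactive"        # must be "batch" or "interactive"
}
```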
16 changes: 16 additions & 0 deletions pkg/data_sources/bigquery_connection.go
@@ -102,6 +102,16 @@ var bigQueryConnectionSchema = map[string]*schema.Schema{
Computed: true,
Description: "Max number of bytes that can be billed for a given BigQuery query",
},
"execution_project": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "Project to bill for query execution",
},
"priority": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "The priority with which to execute BigQuery queries",
},
"gcs_bucket": &schema.Schema{
Type: schema.TypeString,
Computed: true,
@@ -193,6 +203,12 @@ func datasourceBigQueryConnectionRead(ctx context.Context, d *schema.ResourceDat
if err := d.Set("maximum_bytes_billed", connection.Details.MaximumBytesBilled); err != nil {
return diag.FromErr(err)
}
if err := d.Set("execution_project", connection.Details.ExecutionProject); err != nil {
return diag.FromErr(err)
}
if err := d.Set("priority", connection.Details.Priority); err != nil {
return diag.FromErr(err)
}
if err := d.Set("gcs_bucket", connection.Details.GcsBucket); err != nil {
return diag.FromErr(err)
}
4 changes: 4 additions & 0 deletions pkg/data_sources/bigquery_connection_acceptance_test.go
@@ -34,6 +34,8 @@ func TestAccDbtCloudBigQueryConnectionDataSource(t *testing.T) {
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "retries", "3"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "location", "EU"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "maximum_bytes_billed", "100000"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "execution_project", "test_gcp_project_id2"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "priority", "batch"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "gcs_bucket", "test_gcs_bucket"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "dataproc_region", "test_dataproc_region"),
resource.TestCheckResourceAttr("data.dbtcloud_bigquery_connection.test", "dataproc_cluster_name", "test_dataproc_cluster_name"),
@@ -75,6 +77,8 @@ func bigQueryConnection(projectName, connectionName string) string {
retries = 3
location = "EU"
maximum_bytes_billed = 100000
execution_project = "test_gcp_project_id2"
priority = "batch"
gcs_bucket = "test_gcs_bucket"
dataproc_region = "test_dataproc_region"
dataproc_cluster_name = "test_dataproc_cluster_name"
6 changes: 6 additions & 0 deletions pkg/dbt_cloud/bigquery_connection.go
@@ -22,6 +22,8 @@ type BigQueryConnectionDetails struct {
Retries *int `json:"retries,omitempty"`
Location *string `json:"location,omitempty"`
MaximumBytesBilled *int `json:"maximum_bytes_billed,omitempty"`
ExecutionProject *string `json:"execution_project,omitempty"`
Priority *string `json:"priority,omitempty"`
GcsBucket *string `json:"gcs_bucket,omitempty"`
DataprocRegion *string `json:"dataproc_region,omitempty"`
DataprocClusterName *string `json:"dataproc_cluster_name,omitempty"`
@@ -88,6 +90,8 @@ func (c *Client) CreateBigQueryConnection(
retries *int,
location *string,
maximumBytesBilled *int,
executionProject *string,
priority *string,
gcsBucket *string,
dataprocRegion *string,
dataprocClusterName *string,
@@ -112,6 +116,8 @@ func (c *Client) CreateBigQueryConnection(
Retries: retries,
Location: location,
MaximumBytesBilled: maximumBytesBilled,
ExecutionProject: executionProject,
Priority: priority,
GcsBucket: gcsBucket,
DataprocRegion: dataprocRegion,
DataprocClusterName: dataprocClusterName,
56 changes: 56 additions & 0 deletions pkg/resources/bigquery_connection.go
@@ -11,6 +11,11 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

var bigQueryPriorities = []string{
"batch",
"interactive",
}

func ResourceBigQueryConnection() *schema.Resource {
return &schema.Resource{
CreateContext: resourceBigQueryConnectionCreate,
@@ -116,6 +121,17 @@ func ResourceBigQueryConnection() *schema.Resource {
Optional: true,
Description: "Max number of bytes that can be billed for a given BigQuery query",
},
"execution_project": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Description: "Project to bill for query execution",
},
"priority": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Description: "The priority with which to execute BigQuery queries (batch or interactive)",
ValidateFunc: validation.StringInSlice(bigQueryPriorities, false),
},
"gcs_bucket": &schema.Schema{
Type: schema.TypeString,
Optional: true,
@@ -197,6 +213,20 @@ func resourceBigQueryConnectionCreate(ctx context.Context, d *schema.ResourceDat
maximumBytesBilled := d.Get("maximum_bytes_billed").(int)
maximumBytesBilledVal = &maximumBytesBilled
}
var executionProjectVal *string
if d.Get("execution_project").(string) == "" {
executionProjectVal = nil
} else {
executionProject := d.Get("execution_project").(string)
executionProjectVal = &executionProject
}
var priorityVal *string
if d.Get("priority").(string) == "" {
priorityVal = nil
} else {
priority := d.Get("priority").(string)
priorityVal = &priority
}
var gcsBucketVal *string
if d.Get("gcs_bucket").(string) == "" {
gcsBucketVal = nil
@@ -241,6 +271,8 @@ func resourceBigQueryConnectionCreate(ctx context.Context, d *schema.ResourceDat
retriesVal,
locationVal,
maximumBytesBilledVal,
executionProjectVal,
priorityVal,
gcsBucketVal,
dataprocRegionVal,
dataprocClusterNameVal,
@@ -329,6 +361,12 @@ func resourceBigQueryConnectionRead(ctx context.Context, d *schema.ResourceData,
if err := d.Set("maximum_bytes_billed", connection.Details.MaximumBytesBilled); err != nil {
return diag.FromErr(err)
}
if err := d.Set("execution_project", connection.Details.ExecutionProject); err != nil {
return diag.FromErr(err)
}
if err := d.Set("priority", connection.Details.Priority); err != nil {
return diag.FromErr(err)
}
if err := d.Set("gcs_bucket", connection.Details.GcsBucket); err != nil {
return diag.FromErr(err)
}
@@ -368,6 +406,8 @@ func resourceBigQueryConnectionUpdate(ctx context.Context, d *schema.ResourceDat
d.HasChange("retries") ||
d.HasChange("location") ||
d.HasChange("maximum_bytes_billed") ||
d.HasChange("execution_project") ||
d.HasChange("priority") ||
d.HasChange("gcs_bucket") ||
d.HasChange("dataproc_region") ||
d.HasChange("dataproc_cluster_name") ||
@@ -450,6 +490,22 @@ func resourceBigQueryConnectionUpdate(ctx context.Context, d *schema.ResourceDat
connection.Details.MaximumBytesBilled = &maximumBytesBilled
}
}
if d.HasChange("execution_project") {
executionProject := d.Get("execution_project").(string)
if executionProject == "" {
connection.Details.ExecutionProject = nil
} else {
connection.Details.ExecutionProject = &executionProject
}
}
if d.HasChange("priority") {
priority := d.Get("priority").(string)
if priority == "" {
connection.Details.Priority = nil
} else {
connection.Details.Priority = &priority
}
}
if d.HasChange("gcs_bucket") {
gcsBucket := d.Get("gcs_bucket").(string)
if gcsBucket == "" {
7 changes: 7 additions & 0 deletions pkg/resources/bigquery_connection_acceptance_test.go
@@ -41,6 +41,8 @@ func TestAccDbtCloudBigQueryConnectionResource(t *testing.T) {
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "auth_provider_x509_cert_url", "test_auth_provider_x509_cert_url"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "client_x509_cert_url", "test_client_x509_cert_url"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "retries", "3"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "execution_project", "test_project_id_2"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "priority", "interactive"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "is_configured_for_oauth", "false"),
),
},
@@ -57,6 +59,7 @@ func TestAccDbtCloudBigQueryConnectionResource(t *testing.T) {
Config: testAccDbtCloudBigQueryConnectionResourceOAuthConfig(connectionName2, projectName, privateKey),
Check: resource.ComposeTestCheckFunc(
testAccCheckDbtCloudConnectionExists("dbtcloud_bigquery_connection.test_connection"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "priority", "batch"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "application_secret", "test_application_secret"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "application_id", "test_application_id"),
resource.TestCheckResourceAttr("dbtcloud_bigquery_connection.test_connection", "is_configured_for_oauth", "true"),
@@ -94,6 +97,8 @@ resource "dbtcloud_bigquery_connection" "test_connection" {
auth_provider_x509_cert_url = "test_auth_provider_x509_cert_url"
client_x509_cert_url = "test_client_x509_cert_url"
retries = 3
execution_project = "test_project_id_2"
priority = "interactive"
}
`, projectName, connectionName, privateKey)
}
@@ -119,6 +124,8 @@
auth_provider_x509_cert_url = "test_auth_provider_x509_cert_url"
client_x509_cert_url = "test_client_x509_cert_url"
retries = 3
execution_project = "test_project_id_2"
priority = "batch"
application_secret = "test_application_secret"
application_id = "test_application_id"
}