Skip to content

Commit

Permalink
Merge pull request #276 from dbt-labs/feature/datasource-all-jobs
Browse files Browse the repository at this point in the history
Add a new datasource for `dbtcloud_jobs` to retrieve multiple jobs
  • Loading branch information
chasewalden authored Jul 16, 2024
2 parents d808276 + f1c1e45 commit 2797749
Show file tree
Hide file tree
Showing 14 changed files with 828 additions and 14 deletions.
133 changes: 133 additions & 0 deletions docs/data-sources/jobs.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "dbtcloud_jobs Data Source - dbtcloud"
subcategory: ""
description: |-
Retrieve all the jobs for a given dbt Cloud project or environment along with the environment details for the jobs. This will return both the jobs created from Terraform but also the jobs created in the dbt Cloud UI.
---

# dbtcloud_jobs (Data Source)

Retrieve all the jobs for a given dbt Cloud project or environment along with the environment details for the jobs. This will return both the jobs created from Terraform but also the jobs created in the dbt Cloud UI.

## Example Usage

```terraform
// we can search all jobs by project
data "dbtcloud_jobs" "test_all_jobs_in_project" {
  project_id = 1234
}
// or by environment
data "dbtcloud_jobs" "test_all_jobs_in_environment" {
  environment_id = 1234
}
// we can then retrieve all the jobs from the environment flagged as production
// this would include the jobs created by Terraform and the jobs created from the dbt Cloud UI
locals {
my_jobs_prod = [for job in data.dbtcloud_jobs.test_all_jobs_in_project.jobs : job if job.environment.deployment_type == "production"]
}
```

<!-- schema generated by tfplugindocs -->
## Schema

### Optional

- `environment_id` (Number) The ID of the environment for which we want to retrieve the jobs (one of `project_id` or `environment_id` must be set)
- `project_id` (Number) The ID of the project for which we want to retrieve the jobs (one of `project_id` or `environment_id` must be set)

### Read-Only

- `jobs` (Attributes Set) Set of jobs with their details (see [below for nested schema](#nestedatt--jobs))

<a id="nestedatt--jobs"></a>
### Nested Schema for `jobs`

Read-Only:

- `dbt_version` (String) The version of dbt used for the job. If not set, the environment version will be used.
- `deferring_environment_id` (Number) The ID of the environment this job defers to
- `deferring_job_definition_id` (Number) [Deprecated - deferral is now set at the environment level] The ID of the job definition this job defers to
- `description` (String) The description of the job
- `environment` (Attributes) Details of the environment the job is running in (see [below for nested schema](#nestedatt--jobs--environment))
- `environment_id` (Number) The ID of the environment
- `execute_steps` (List of String) The list of steps to run in the job
- `execution` (Attributes) (see [below for nested schema](#nestedatt--jobs--execution))
- `generate_docs` (Boolean) Whether the job generates docs
- `id` (Number) The ID of the job
- `job_completion_trigger_condition` (Attributes) Whether the job is triggered by the completion of another job (see [below for nested schema](#nestedatt--jobs--job_completion_trigger_condition))
- `job_type` (String) The type of job (e.g. CI, scheduled)
- `name` (String) The name of the job
- `project_id` (Number) The ID of the project
- `run_generate_sources` (Boolean) Whether the job tests source freshness
- `schedule` (Attributes) (see [below for nested schema](#nestedatt--jobs--schedule))
- `settings` (Attributes) (see [below for nested schema](#nestedatt--jobs--settings))
- `triggers` (Attributes) (see [below for nested schema](#nestedatt--jobs--triggers))
- `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs

<a id="nestedatt--jobs--environment"></a>
### Nested Schema for `jobs.environment`

Read-Only:

- `deployment_type` (String) Type of deployment environment: staging, production
- `id` (Number) ID of the environment
- `name` (String) Name of the environment
- `project_id` (Number)
- `type` (String) Environment type: development or deployment


<a id="nestedatt--jobs--execution"></a>
### Nested Schema for `jobs.execution`

Read-Only:

- `timeout_seconds` (Number) The number of seconds before the job times out


<a id="nestedatt--jobs--job_completion_trigger_condition"></a>
### Nested Schema for `jobs.job_completion_trigger_condition`

Read-Only:

- `condition` (Attributes) (see [below for nested schema](#nestedatt--jobs--job_completion_trigger_condition--condition))

<a id="nestedatt--jobs--job_completion_trigger_condition--condition"></a>
### Nested Schema for `jobs.job_completion_trigger_condition.condition`

Read-Only:

- `job_id` (Number)
- `project_id` (Number)
- `statuses` (Set of String)



<a id="nestedatt--jobs--schedule"></a>
### Nested Schema for `jobs.schedule`

Read-Only:

- `cron` (String) The cron schedule for the job. Only used if triggers.schedule is true


<a id="nestedatt--jobs--settings"></a>
### Nested Schema for `jobs.settings`

Read-Only:

- `target_name` (String) Value for `target.name` in the Jinja context
- `threads` (Number) Number of threads to run dbt with


<a id="nestedatt--jobs--triggers"></a>
### Nested Schema for `jobs.triggers`

Read-Only:

- `git_provider_webhook` (Boolean) Whether the job runs automatically on PR creation
- `github_webhook` (Boolean) Whether the job runs automatically on PR creation
- `on_merge` (Boolean) Whether the job runs automatically once a PR is merged
- `schedule` (Boolean) Whether the job runs on a schedule
15 changes: 15 additions & 0 deletions examples/data-sources/dbtcloud_jobs/data-source.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// Look up every job in a given project...
data "dbtcloud_jobs" "test_all_jobs_in_project" {
  project_id = 1234
}

// ...or every job in a given environment
data "dbtcloud_jobs" "test_all_jobs_in_environment" {
  environment_id = 1234
}

// Filter down to the jobs whose environment is flagged as production.
// This covers jobs managed by Terraform as well as ones created in the dbt Cloud UI.
locals {
  my_jobs_prod = [for job in data.dbtcloud_jobs.test_all_jobs_in_project.jobs : job if job.environment.deployment_type == "production"]
}
5 changes: 5 additions & 0 deletions pkg/dbt_cloud/job.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,11 @@ type Job struct {
JobCompletionTrigger *JobCompletionTrigger `json:"job_completion_trigger_condition"`
}

// JobWithEnvironment is a Job together with the details of the environment
// it runs in, as returned by the jobs API when the environment is included
// via include_related.
type JobWithEnvironment struct {
	Job
	// Environment is decoded from the "environment" key of the API payload.
	Environment Environment `json:"environment"`
}

func (c *Client) GetJob(jobID string) (*Job, error) {
req, err := http.NewRequest(
"GET",
Expand Down
45 changes: 45 additions & 0 deletions pkg/dbt_cloud/paginate.go
Original file line number Diff line number Diff line change
Expand Up @@ -185,3 +185,48 @@ func (c *Client) GetAllLicenseMaps() ([]LicenseMap, error) {
}
return allLicenseMaps, nil
}

// GetAllJobs returns all the jobs in the account for the given project or
// environment, with the related environment details included in each job.
//
// Exactly one of projectID or environmentID must be non-zero (zero means
// "not set"); passing both, or neither, is an error.
func (c *Client) GetAllJobs(projectID int, environmentID int) ([]JobWithEnvironment, error) {
	var url string

	switch {
	case projectID != 0 && environmentID != 0:
		return nil, fmt.Errorf("you can't filter by both project and environment")
	case projectID == 0 && environmentID == 0:
		return nil, fmt.Errorf("you must filter by either project or environment")
	case projectID != 0:
		url = fmt.Sprintf(
			"%s/v2/accounts/%d/jobs?project_id=%d&include_related=[environment]",
			c.HostURL,
			c.AccountID,
			projectID,
		)
	default:
		url = fmt.Sprintf(
			"%s/v2/accounts/%d/jobs?environment_id=%d&include_related=[environment]",
			c.HostURL,
			c.AccountID,
			environmentID,
		)
	}

	allJobsRaw := c.GetData(url)

	// The paginated payload is generic JSON, so round-trip each entry
	// through encoding/json to decode it into the typed struct.
	allJobs := make([]JobWithEnvironment, 0, len(allJobsRaw))
	for _, job := range allJobsRaw {
		data, err := json.Marshal(job)
		if err != nil {
			return nil, err
		}
		currentJob := JobWithEnvironment{}
		if err := json.Unmarshal(data, &currentJob); err != nil {
			return nil, err
		}
		allJobs = append(allJobs, currentJob)
	}
	return allJobs, nil
}
2 changes: 1 addition & 1 deletion pkg/framework/objects/environment/data_source_all.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ var (
_ datasource.DataSourceWithConfigure = &environmentsDataSources{}
)

func EnvironmentsDataSources() datasource.DataSource {
func EnvironmentsDataSource() datasource.DataSource {
return &environmentsDataSources{}
}

Expand Down
165 changes: 165 additions & 0 deletions pkg/framework/objects/job/data_source_all.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
package job

import (
	"context"
	"fmt"

	"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud"
	"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/helper"
	"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/utils"
	"github.com/hashicorp/terraform-plugin-framework/datasource"
	"github.com/hashicorp/terraform-plugin-framework/types"
	"github.com/samber/lo"
)

// Compile-time checks that jobsDataSource implements the framework
// interfaces it is registered as.
var (
	_ datasource.DataSource                   = &jobsDataSource{}
	_ datasource.DataSourceWithConfigure      = &jobsDataSource{}
	_ datasource.DataSourceWithValidateConfig = &jobsDataSource{}
)

// JobsDataSource returns a new instance of the dbtcloud_jobs data source,
// which retrieves all jobs for a project or environment.
func JobsDataSource() datasource.DataSource {
	return &jobsDataSource{}
}

// jobsDataSource implements the dbtcloud_jobs data source.
type jobsDataSource struct {
	// client is the dbt Cloud API client, set in Configure.
	client *dbt_cloud.Client
}

// Metadata sets the full data source type name, e.g. "dbtcloud_jobs".
func (d *jobsDataSource) Metadata(
	_ context.Context,
	req datasource.MetadataRequest,
	resp *datasource.MetadataResponse,
) {
	resp.TypeName = req.ProviderTypeName + "_jobs"
}

// Read fetches all the jobs for the configured project or environment and
// stores them, along with their environment details, in the Terraform state.
func (d *jobsDataSource) Read(
	ctx context.Context,
	req datasource.ReadRequest,
	resp *datasource.ReadResponse,
) {
	var config JobsDataSourceModel

	resp.Diagnostics.Append(req.Config.Get(ctx, &config)...)
	if resp.Diagnostics.HasError() {
		return
	}

	// ValueInt64 yields 0 for a null value, which GetAllJobs treats as
	// "not set", so no explicit null check is needed here.
	projectID := int(config.ProjectID.ValueInt64())
	environmentID := int(config.EnvironmentID.ValueInt64())

	apiJobs, err := d.client.GetAllJobs(projectID, environmentID)
	if err != nil {
		resp.Diagnostics.AddError(
			"Issue when retrieving jobs",
			err.Error(),
		)
		return
	}

	state := config

	allJobs := make([]JobDataSourceModel, 0, len(apiJobs))
	for _, job := range apiJobs {

		// The completion-trigger condition is optional: keep the pointer
		// nil when the API returns none so the attribute is stored as null.
		var jobCompletionTriggerCondition *JobCompletionTrigger
		if job.JobCompletionTrigger != nil {
			jobCompletionTriggerCondition = &JobCompletionTrigger{
				Condition: JobCompletionTriggerCondition{
					JobID: types.Int64Value(
						int64(job.JobCompletionTrigger.Condition.JobID),
					),
					ProjectID: types.Int64Value(
						int64(job.JobCompletionTrigger.Condition.ProjectID),
					),
					// The API stores statuses as integer codes; map them
					// back to their human-readable names.
					Statuses: lo.Map(
						job.JobCompletionTrigger.Condition.Statuses,
						func(status int, _ int) types.String {
							return types.StringValue(
								utils.JobCompletionTriggerConditionsMappingCodeHuman[status].(string),
							)
						},
					),
				},
			}
		}

		currentJob := JobDataSourceModel{
			Execution: JobExecution{
				TimeoutSeconds: types.Int64Value(int64(job.Execution.Timeout_Seconds)),
			},
			GenerateDocs:       types.BoolValue(job.Generate_Docs),
			RunGenerateSources: types.BoolValue(job.Run_Generate_Sources),
			ID: types.Int64PointerValue(
				helper.IntPointerToInt64Pointer(job.ID),
			),
			ProjectID:     types.Int64Value(int64(job.Project_Id)),
			EnvironmentID: types.Int64Value(int64(job.Environment_Id)),
			Name:          types.StringValue(job.Name),
			Description:   types.StringValue(job.Description),
			DbtVersion: types.StringPointerValue(
				job.Dbt_Version,
			),
			ExecuteSteps: helper.SliceStringToSliceTypesString(job.Execute_Steps),
			DeferringJobDefinitionID: types.Int64PointerValue(helper.IntPointerToInt64Pointer(
				job.Deferring_Job_Id),
			),
			DeferringEnvironmentID: types.Int64PointerValue(helper.IntPointerToInt64Pointer(
				job.DeferringEnvironmentId),
			),
			Triggers: JobTriggers{
				GithubWebhook:      types.BoolValue(job.Triggers.Github_Webhook),
				GitProviderWebhook: types.BoolValue(job.Triggers.GitProviderWebhook),
				Schedule:           types.BoolValue(job.Triggers.Schedule),
				OnMerge:            types.BoolValue(job.Triggers.OnMerge),
			},
			Settings: JobSettings{
				Threads:    types.Int64Value(int64(job.Settings.Threads)),
				TargetName: types.StringValue(job.Settings.Target_Name),
			},
			Schedule: JobSchedule{
				Cron: types.StringValue(job.Schedule.Cron),
			},
			JobType:           types.StringValue(job.JobType),
			TriggersOnDraftPr: types.BoolValue(job.TriggersOnDraftPR),
			Environment: JobEnvironment{
				ProjectID:      types.Int64Value(int64(job.Environment.Project_Id)),
				ID:             types.Int64Value(int64(*job.Environment.ID)),
				Name:           types.StringValue(job.Environment.Name),
				DeploymentType: types.StringPointerValue(job.Environment.DeploymentType),
				Type:           types.StringValue(job.Environment.Type),
			},
			JobCompletionTriggerCondition: jobCompletionTriggerCondition,
		}

		allJobs = append(allJobs, currentJob)
	}
	state.Jobs = allJobs

	resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
}

// Configure stores the dbt Cloud API client supplied by the provider so
// Read can use it later.
func (d *jobsDataSource) Configure(
	_ context.Context,
	req datasource.ConfigureRequest,
	resp *datasource.ConfigureResponse,
) {
	// ProviderData is nil during provider-level validation, before the
	// provider has been configured.
	if req.ProviderData == nil {
		return
	}

	// Use a comma-ok assertion so an unexpected type surfaces as a
	// diagnostic instead of a panic.
	client, ok := req.ProviderData.(*dbt_cloud.Client)
	if !ok {
		resp.Diagnostics.AddError(
			"Unexpected Data Source Configure Type",
			fmt.Sprintf(
				"Expected *dbt_cloud.Client, got: %T. Please report this issue to the provider developers.",
				req.ProviderData,
			),
		)
		return
	}

	d.client = client
}
Loading

0 comments on commit 2797749

Please sign in to comment.